DataPlot::GetDataPoints_TO_RETURN_VECTOR_OF_INDECES* *------------------------------------------------------------------------------*/ #include <origin.

h> // main Origin C header that is precompiled and already include most headers #pragma labtalk(0) //--- CPY 7/26/05 hide all functions in this file from LT access, as XF should be used instead //#include "FunctionGroup.h" #include <FDFTree.h> #include "XFunctionEx.h" #include <ocu.h> #include <oErrMsg.h> #include <okocUtils.h> ///---Sim 06-19-2007 IMPROVE_WARNING_MESSAGE_POST_FUNCTION #include <..\OriginLab\fft.h> #include <fft_utils.h> //#include <ocMath.h> //#include <Range.h> //////////////////////////////////////////////////////////////////////////////////// #define _DBINT(_STR, _INT) //out_int(_STR, _INT); #define _DBMGS(_STR) //out_str(_STR); #define MAX_MATRIX_POINTS 10000000 bool set_curve_input(const curvebase& cuvInput, TreeNode& trInput, int& i0, int& imax, HWND& hWndRet, bool bNeedInit)// = true); { bool bFullRange = cuvInput.GetSourceRange(i0, imax)? false:true; if(bNeedInit) // not supplied, we need to init trInput { set_active_layer(trInput); set_curve(cuvInput, trInput); } else { if(!trInput.Range1.UseRange.IsValid()) trInput.Range1.UseRange.nVal = 0; bFullRange = trInput.Range1.UseRange.nVal > 0? false:true; // we need to set cuvInput active GraphLayer gl = get_graph_layer(trInput); if(gl) { set_active_layer(gl); Page pg = gl.GetPage(); if(pg) hWndRet = pg.GetWindow().GetSafeHwnd(); string str;

str.Format("set %s -a",cuvInput.GetName()); gl.LT_execute(str); } } return bFullRange;

}

// add to TreeNode the X and Y dataset names of the given curve bool set_curve(const curvebase& cuvInput, TreeNode& trNode) { if(!cuvInput) return false; if(!trNode) return false; int i0 = 0, imax = 0; trNode.Range1.UseRange.nVal = cuvInput.GetSourceRange(i0, imax) ? 1 : 0; trNode.Range1.R1.nVal = cuvInput.GetLowerBound(); trNode.Range1.R2.nVal = cuvInput.GetUpperBound(); trNode.Range1.Ydata.strVal = cuvInput.GetName(); Dataset dsX; if(cuvInput.AttachX(dsX)) trNode.Range1.Xdata.strVal = dsX.GetName(); else trNode.Range1.Xdata.strVal = ""; return true; } // add to TreeNode the name of the active graph page and the active layer number bool set_active_layer(TreeNode& trNode) { GraphPage gpg = Project.Pages(); if(gpg) { GraphLayer glyr = Project.ActiveLayer(); trNode.Page.strVal = gpg.GetName(); trNode.Layer.nVal = glyr.GetIndex(); return true; } return false; } //set the given layer to be active layer, if page not open, will open it to be active page as well bool set_active_layer(Layer& layr) { if(!layr) return false; Page pg = layr.GetPage();

if(pg) { pg.SetShow(); string strTemp = "page.active="; strTemp += layr.GetIndex() + 1;//need to labtalk is 1 offset pg.LT_execute(strTemp); return true; } return false; } // retrive the graph layer from the tree node GraphLayer get_graph_layer(const TreeNode& trNode) { GraphLayer gl; if(trNode) { TreeNode trPage = trNode.Page; if(trPage) { GraphPage gpg(trPage.strVal); if(gpg) { TreeNode trTemp = trNode.Layer; if(trTemp) { int nIndex = trTemp.nVal; if(nIndex >= 0 && nIndex < gpg.Layers.Count()) { gl = gpg.Layers(nIndex); return gl; } } } } } return gl; } int curve_in_layer_get_index(const curvebase& cuv, const GraphLayer& gl) { if (!cuv.IsValid()) //Leo 08/26/05 QA70-6960 RUN_TIME_ERROR_WHEN_CURVE_IS_INVALID return -1; int ii = 0; foreach(DataPlot dp in gl.DataPlots) {

const curvebase& cuv2. . GraphLayer& glayerFound) { //---// starting layer given. gl). foreach(GraphLayer gl in gpg. if(strName. if (pnLayerIndex) *pnLayerIndex = -1.string strName = dp.GetName()) == 0) return ii. if (0 <= nDPIndex) { if (pnLayerIndex) *pnLayerIndex = nLayer.CompareNoCase(cuv. // Search the layer where both two curves are both plotted //glayerFound is layer where cuv2 is currently plotted bool curve_both_in_layer(const curvebase& cuv1. } BOOL curve_in_page_get_indices(const curvebase& cuv. if (pnDataPlotIndex) *pnDataPlotIndex = -1. } return FALSE. then we should not search if cuv2 already in that layer and // we can assume this is the layer we want if(glayerFound && curve_in_layer_get_index(cuv2.Layers) { int nDPIndex = curve_in_layer_get_index(cuv. glayerFound) < 0? false:true. const GraphPage &gpg. } return -1. int *pnLayerIndex. if (pnDataPlotIndex) *pnDataPlotIndex = nDPIndex. ii++.GetDatasetName(). glayerFound) >= 0) return curve_in_layer_get_index(cuv1. return TRUE. } } ++nLayer. int *pnDataPlotIndex) { int nLayer = 0.

Empty().//---foreach (GraphPage pg in Project. . return strWks.HasX(strTemp)) { nn = strTemp.Empty(). strY.Layers) { int nCuv2 = curve_in_layer_get_index(cuv2. } string curve_get_wks_col_names(curvebase& cc.GraphPages) { foreach(GraphLayer gl in pg.Left(nn). string strWks. int nCuv1 = curve_in_layer_get_index(cuv1. strX = strTemp.Find('_').GetName(strTemp). if(cc. glayerFound = gl.Mid(nn+1). strX.Empty(). gl). if(!wks) { strWks. Worksheet wks(strWks). string& strX. if(nn > 0) strWks = strTemp. } } return strWks.Mid(nn+1). string& strY) { string strTemp. if(cc) { cc. } } return false.Find('_'). int nn = strTemp. if(nCuv2 < 0) continue. } strY = strTemp. if(nCuv1 >= 0) return true. gl).

.SetFormat(OKCOLTYPE_NUMERIC).Columns(strX). if(glyr) { string strY. string strWks = curve_get_wks_col_names(Project. return true. } /** >Analysis duplicate the active curve in the active graph layer so that analysis routine like derivatives and smoothing can be performed on the copied curve Parameters: lpcszNewColName = name of the Y column for the copied curve bReuseIfInWks = option to always create new copy of can reuse if named column is already in the same worksheet as the original Return: NULL if no active graph layer with active curve found. strX. string strNewCol = lpcszNewColName.ActiveCurveBase(). if(!bReuseIfInWks || !cNew. Column cx = wks.GetFormat(). bool bSetAsY = true) { int nFormat = cc.SetType(OKDATAOBJ_DESIGNATION_Y).IsEmpty()) strNewCol = "A". if(nFormat != OKCOLTYPE_NUMERIC && nFormat != OKCOLTYPE_TEXT_NUMERIC) { cc. if(strNewCol.GetType() != OKDATAOBJ_DESIGNATION_Y) cc.Columns(strY). bool bReuseIfInWks) //= NULL. or if the operation failed */ curvebase& curve_duplicate_active(LPCSTR lpcszNewColName .Pages(). } return false.} static bool check_set_col_analysis(Column& cc. Column cNew = wks. if(gpg) { GraphLayer glyr = Project.Columns(strNewCol).IsEmpty()) { Worksheet wks(strWks). strX. if(bSetAsY && cc.ActiveLayer(). Column cy = wks. if(!strWks.IsValid()) strY). = false { GraphPage gpg = Project.

dp. gpg)) // not present { GraphLayer grl = gpg. } } string strTemp. if (!grl) return false. grl. wks.SetColor(nColor). } } return NULL. GraphLayer &grl. return true.Rescale().GetIndex(). } return true.GetIndex()+1.Columns(strNewCol).InsertCol(cy. // find the input curve layer and plot the result curve .GetCurve(cx. cNew = wks.Layers(0). nColor.Refresh(). GraphPage &gpg.AddPlot(cuv.GetIndex()). strNewCol. strNewCol = strTemp.GetUpperBound()). int nColor. gpg. bool bRescale) if (!curve_in_page_get_indices(cuv. } /** */ bool { curve_update_in_page(curvebase& cuv. int nColor.{ } // we need to make sure new col is a Y col check_set_col_analysis(cNew). nPlotType). bool bRescale. cNew. return wks.DataPlots(nPlot). bool add_curve_to_graph_layer(curvebase& cuv. bRescale).SetUpperBound(cy. int nPlotType) { int nPlot = grl. cNew. if( bRescale ) grl. DataPlot dp = grl. strTemp). } else return add_curve_to_graph_layer(cuv.

int iZsize = vecZ. double& dXmax. } return false. so we will make the plot int nPlot = gl.DataPlots(nPlot). iYNumSteps. string strWksName. // Create a temporary worksheet.GetSize().AddCol(). double& dYmin. // support only matrix size of greater than 2x2 if( (iXsize != iYsize) || (iYsize != iZsize) || (iZsize != iXsize) ) return 1. jj. nColor. with prefix ocmath_. bool bCheckData) { bool bRet.Create("Origin. wksTemp. if(iXsize < 4) return 1. int nColor. dXStep. cuvInput.. wksTemp.AddPlot(cuvFit.bool curve_update(curvebase& cuvFit. const TreeNode& trInput.Z vectors are of equal length .Y. dYStep. int ii.SetColor(nColor). bool bRescale) // bRescale = false { GraphLayer gl = get_graph_layer(trInput). iYStepLoc.return if not int iXsize = vecX. const curvebase& cuvInput. if(!curve_both_in_layer(cuvFit. bRet = wksTemp. we will just use 1st for now { /* // cuvInput plotted in gl but cuvFit not.otw". if( bRescale ) gl. gl) && gl) // there maybe many. } ///Frank 4/7/05 OC_CONVERT_MATRIX_FUNCTION_MOVE_TO_VC_LEVEL_WITH_PREFIX_OCMATH ///These functions are replace by VC level functions. bRescale). double dev. id. double& dYmax. matrix& matData. vector& vecY. dp. iRet. iXStepLoc. wksTemp. IDM_PLOT_LINE). /* int convert_regular_xyz_to_matrix(vector& vecX.AddCol(). CREATE_TEMP). wksTemp.DeleteCol(0).Rescale().DeleteCol(0). so the OC code should be comment. double& dXmin.GetSize(). vector& vecZ..GetPage(). . // Check if X.GetSize(). and fill in the vectors into first three columns Worksheet wksTemp. isize. wksTemp. gl. iXNumSteps. */ return add_curve_to_graph_layer(cuvFit. DataPlot dp = gl.GetName(strWksName). int iYsize = vecY.

dYStep).c2 = 3. wksTemp. dsZ = vecZ. if(iRet != 0) return 4. Dataset dsZ(wksTemp. dYStep). then. wksTemp.. if(iRet != 0) return 2. iXNumSteps. dXStep).c1 = 1. // Do a check on whether the x and y data groups agree // Number of groups in one should match group length in the other if ( (iXStepLoc != iYNumSteps) | (iYStepLoc != iXNumSteps) ) return 3. // Find step location. number of steps.wksname$ = strWksName. wksTemp. // Find step location. 2). Dataset dsX(wksTemp. dXStep). dsX = vecX. wksTemp. sort. Dataset dsYMed(wksTemp.cname1$ = "A: B". if(iRet != 0) return 2. and step value for x data iRet = convert_regular_find_step(0. // sort only first 3 cols sort. if(iRet != 0) return 4. } // Now sort the worksheet wrt Y ascending as primary and and // X ascending as secondary . wksTemp. iYNumSteps. iYStepLoc. . iXNumSteps. 1).cname2$ = "A: A".AddCol(). sort. 4).. Dataset dsY(wksTemp. iXStepLoc. number of steps. dsY = vecY.this format is needed to copy to matrix using sort = LabTalk. sort. if(bCheckData) { // Check for x data for deviations iRet = convert_regular_check_data(0. // Create two more columns in temp worksheet for computing x/y step values wksTemp.AddCol().wksTemp. sort.r1 = 1. iYStepLoc. sort.r2 = isize. 3).sort. sort.wks(). iXStepLoc. iYNumSteps.AddCol(). and step value for y data iRet = convert_regular_find_step(1. Dataset dsXMed(wksTemp. 0). sort. // If data should be checked for iregularity. // Check for y data for deviations iRet = convert_regular_check_data(1.

GetSize(). int iMethod. double dWFL) { int i. NULL)) != 0) iErr = iRet. nx.// Now fill matrix with Z data. } #ifdef _O_NAG_H int convert_random_xyz_to_matrix_nag(vector& vecX. dYmin = min(dsY). yhi. leave rnq as is . &comm. // Success return 0. // Get number of scatter points. iRet. dXmax = max(dsX). // Call the appropriate interpolation routine if (iMethod == 0) { // Renka-Cline method if( (iRet = nag_2d_scat_interpolant(method. // Scale defaults with values passed by user. ylo. vecX. vector& vecY. double xlo.0. double rnq = -1. if(iMethod == 0) method = Nag_RC. double nw = 12.0. ny. max values for use in setting matrix co-ordinates dXmin = min(dsX). Nag_2d_Scat_Method method. j. iYStepLoc). double& dYmin. m. xhi. } else { // Default values of parameters for Shepard's method double nq = 24. else method = Nag_Shep.SetByVector(dsZ).SetSize(iXStepLoc. double dQIL. iErr = 0. // Get min.0. n. matrix& matResult. and return coordinate values in variables passed matData. nx = 2 * sqrt(m). double& dYmax. Nag_E01_Opt optional. double& dXmin. // Fill matrix by just using Z dataset matData. vector& vecZ. // Set up NAG structures for calling gridding routine Nag_Scat_Struct comm. m. ny = nx. dYmax = max(dsY). and compute grid size accordingly m = vecZ. vecY. double& dXmax. vecZ.

1) / (nx . ylo = min(vecY). optional.nq *= dQIL.i .1)) * xhi. xhi = max(vecX). for (j = 0. vecGY.1) / (ny . yhi = max(vecY). } // Get lo and hi values for x and y data xlo = min(vecX). vecY.1)) * ylo + (1. vecGY. m. j < ny. } // Clean up //--.nw = nw.SetSize(nx * ny).CPY 8/4/03 saw this when doing build 644 of Origin 75 //iRet = nag_2d_scat_free(&comm). &optional)) != 0) iErr = iRet. // Define vectors for gridded data. i < nx.0 * j / (ny . optional. if( (iRet = nag_2d_scat_interpolant(method.0 * i / (nx . ++i) { vecGX[i + nx * j] = (1.1)) * yhi.] nag_2d_scat_free(&comm). vecGZ. // Compute positions of the grid points using the hi and lo scatter values n = 0. ++j) { for (i = 0.SetSize(nx * ny). vecGY.1)) * xlo + (1.SetSize(nx * ny).0 * (nx . vecGZ. } } // Evaluate two-dimensional interpolant function computed by the interpolant function call if( (iRet = nag_2d_scat_eval(&comm.rnq = rnq. and set their size vector vecGX.nq = nq. vecGX. vecZ. n. &comm. vecGY[i + nx * j] = (1.0 * (ny . // Quadratic Interpolant Locality nw *= dWFL. vecGX. vecX. //--// Take gridded data and create a matrix by calling regular conversion if((iErr == -251)||(iErr == -249)||(iErr == 0)) { . ++n.j . vecGZ)) != 0) { if(iRet != 179) iErr = iRet. // Weight Function Locality // Set these values for the conversion method optional.

if( (iXsize != iYsize) || (iYsize != iZsize) || (iZsize != iXsize) ) return -1.GetSize(). if(iSize > MAX_MATRIX_POINTS) return -2. dYmin.Y. vector& vecY.Z vectors are of equal length . vecGY. int iZsize = vecZ. may be wrong dsXresult. } // Return iErr return iErr.DeleteCol(0).AddCol(). int iYsize = vecY.if( ( iRet = convert_regular_xyz_to_matrix(vecGX. 1). int iPrecision) { waitCursor hourGlass. double dYbegin. matrix& matResult.SetSize(iSize).SetSize(iSize). // Create a temporary worksheet to hold result data Worksheet wksTemp. wksTemp. dXmax.GetSize(). int iylen = nint( 1. vector& vecZ.DeleteCol(0). Dataset dsYresult(wksTemp. int ixlen = nint( 1.0 + (dXend . matResult. wksTemp.step size . double dYend. 2). double dXend. Dataset dsZresult(wksTemp. wksTemp. count.otw".dYbegin) / dYstep). // Compute length for temp worksheet columns int ii.0 + (dYend . } #endif //#ifdef _O_NAG_H int convert_sparse_xyz_to_matrix(vector& vecX. bool bRet = wksTemp. dXmin. wksTemp. double dYstep.Create("Origin. jj. int iSize = ixlen *iylen. Dataset dsXresult(wksTemp.return if not int iXsize = vecX. CREATE_TEMP).dXbegin) / dXstep).AddCol(). // too may points . wksTemp. 0). false)) != 0) iErr = -1.AddCol().GetSize(). vecGZ. dsYresult.GetName(strWksName). // Check if X. double dXbegin. dYmax. string strWksName. double dXstep.GetPage(). wksTemp.

for(int ij = ixRow. ij++) { if(dsXresult[ij] != dXFound) break.1). iPrecision). // Find the first y value that matches. z values double dX = vecX[ii].y. } } for (ii = 0. // Fill Z with missing values to start with dsZresult = NANUM. double dY = vecY[ii]. &dsYresult. for (ii=0.SetSize(iSize). . continue to look for y value occurance if(ixRow != -1) { // Find the range of rows with this x value double dXFound = dsXresult[ixRow]. in the above range int iyRow = Data_list(dY. &dsXresult. ii++) { for (jj = 0. jj++) { dsXresult[ii * iylen + jj] = dXbegin + ii * dXstep. y columns of temp worksheet with cyclical values for (ii = 0. ii < ixlen. count=0. dsYresult. ii++) { for (jj = 0. iPrecision). // Fill x. ij < ixRow + iylen. // Find the first occurance of the x value in the temporary worksheet int ixRow = Data_list(dX. jj < iylen. double dZ = vecZ[ii]. ii++) { // Get input x.SetLowerBound(ixRow). jj < iylen. } // Set upper and lower bounds of y dataset to range of x matches found dsYresult. // If found.dsZresult. jj++) { dsYresult[ii * iylen + jj] = dYbegin + jj * dYstep.SetUpperBound(ij . } } // Now read thru data in temp worksheet and try filling result vectors int ilen = iXsize. ii < ilen. ii < ixlen.

} // If no match for x. // Use the second-last value in this sorted list as the guess for step size dStep = dsTemp[iSize . } // Now fill matrix by using Z dataset matResult. double& dStep) { // Get min and max values Dataset dsTemp(vec). increment count of bad points else count++. str = dsTemp.Sort(). // Sort datset. double& dMax.row // If match found. . dMax = max(dsTemp). // Return number of discarded points return count.SetLowerBound(0).2].Sort(). string str. LT_execute(str). and sort again dsTemp. // Reset bounds on y dataset dsYresult.SetByVector(dsZresult. matResult. increment count of bad points else count++. dsTemp. // If no match for y. double& dMin. } int convert_sparse_find_min_max_step(vector& vec.false).GetName()+"=diff("+dsTemp.1). put in the z value into the temporary worksheet at this if(iyRow != -1) dsZresult[iyRow] = dZ.GetSize(). find diff.SetUpperBound(iSize . ixlen).".SetSize(iylen. dMin = min(dsTemp). int iSize = dsTemp. dsYresult. } return 0.GetName() + ").

otw". wksTemp. wksTemp. sort. vecY. // if size less than 2.sort. // Create temp worksheet and put xyz data into wks Worksheet wksTemp. sort. vecZ. nSourceRow = 0.c2 = 3.cname2$ = "A: B".SetSize(0).GetSize().DeleteCol(0).c1 = 1. // sort the first 3 cols sort. sort. 0). while( nSourceRow < dsX.GetPage().GetSize(). Dataset dsX(wksTemp.GetSize() ) { vecX. sort. vector& vecY. vecY. // Loop thru data. dsZ = vecZ.Add(dsZ[nSourceRow]). string strWksName.wks().wksname$ = strWksName. vecZ.AddCol().r1 = 1. sort. dsY = vecY. dsX = vecX. sort. 2). vector& vecZ) { // Check if X. CREATE_TEMP). // Reset vectors to zero size vecX. wksTemp. Dataset dsZ(wksTemp.Add(dsY[nSourceRow]).AddCol(). Dataset dsY(wksTemp.return if not int iXsize = vecX. sort. no need to do anything if( (iXsize != iYsize) || (iYsize != iZsize) || (iZsize != iXsize) ) return 1.r2 = iXsize. int iZsize = vecZ.DeleteCol(0).GetName(strWksName). bool bRet = wksTemp. int iYsize = vecY.Add(dsX[nSourceRow]).int xyz_remove_duplicates(vector& vecX. wksTemp. look for duplicates.Y.AddCol(). 1). wksTemp. .Create("Origin. nTargetRow = 0. if(iXsize < 2) return 0. and replace with mean value int nDupRow.cname1$ = "A: A".SetSize(0). wksTemp.Z vectors are of equal length .GetSize(). // Sort the worksheet wrt X ascending as primary and and Y ascending as secondary using sort = LabTalk.SetSize(0).

wksname$ = strWksName.GetPage(). iType). using sort = LabTalk.cname1$ = "A: A". sort.r2 = iSize. sort.cname2$ = "A: B". // Returns 0 for sucess. iSize. sort. iType + 3). int ii. int& iStepLoc. } } return 0.c2 = 3.nDupRow + 1). // this is defined as a step. nRow <= nSourceRow. // Sort worksheet wrt X/Y primary and Y/X secondary depending on iType value of 0/1 string strWksName. if( nDupRow != nSourceRow ) { // Replace duplicates with mean value for( int nRow = nDupRow + 1. wks. and if the difference // in the middle is greater than twice the difference at first and third points. sort. double& dStep) { Dataset dsData(wks.GetName(strWksName).GetSize(). // To find a step.nDupRow = nSourceRow. number of steps. int& iStepNum.GetSize() && dsX[nSourceRow] == dsX[nSourceRow + 1] && dsY[nSourceRow] == dsY[nSourceRow + 1] ) nSourceRow++.r1 = 1. iSize = dsData. if(iType == 0) { sort. vecZ[nTargetRow] /= (nSourceRow . . //////////////////////////////////////////////////////////////////////////////////// // This sub function is used by regular xyz to matrix conversion function // This function first finds the step location within the dataset. // sort only first 3 cols sort. and the median step value. Dataset dsMed(wks. three successive differences are examined. sort. } nSourceRow++.sort.c1 = 1. while( nSourceRow + 1 < dsX. 1 if no step could be found // static int convert_regular_find_step(int iType. nRow++ ) vecZ[nTargetRow] += dsZ[nRow]. Worksheet& wks. nTargetRow++. and returns the // step location.

ii++) { dsMed[ii] = fabs(dsData[ii * iStepLoc + iStepLoc / 2] . // Success return 0. // Compute the step values and store them in temporary worksheet dsMed.wks(). for(ii = 0.cname1$ = "A: B". sort.3)) return 1. else sort. sort.dsData[ii+2]).3).1). } // Sort these step values and pick the median value for the final x/y step size sort. iStepLoc = ii + 2.} else { } sort.r2 = iStepNum . for(ii=0. if(iType == 0) sort. step3.dsData[ii+3]). step2. // Determine the location of the first step in data double step1. sort. dStep = dsMed[(iStepNum . if((step2 > 2 * step1) && (step2 > 2 * step3)) break.c2 = 4 + iType. sort. step3 = fabs(dsData[ii+2] .SetSize(iStepNum . // Determine number of groups within X data iStepNum = iSize / iStepLoc.cname2$ = "A: A". ii < (iStepNum .1.dsData[(ii + 1) * iStepLoc + iStepLoc / 2]). } if (ii == (iSize .1) / 2]. step2 = fabs(dsData[ii+1] .dsData[ii+1]).r1 = 1.cname1$ = "A: E". sort. sort. } . ii++) { // compute differences at three points step1 = fabs(dsData[ii] .wks().cname1$ = "A: D".wksname$ = strWksName. // sort only the 4th/5th col sort.c1 = 4 + iType. ii < (iSize .1).

cname1$ = "A: B". } // Now go thru all groups of X/Y data and check for deviations. // Returns 0 on success.cname1$ = "A: A". sort.GetName(strWksName). ii < iStepNum. for(ii=0.GetPage(). Dataset dsMed(wks. ascending string strWksName. sort. double dStep) { Dataset dsData(wks.//////////////////////////////////////////////////////////////////////////////////// // This sub function is used by regular xyz to matrix conversion function.r2 = iSize. } else { sort. Worksheet& wks.wksname$ = strWksName.c1 = 1. jj. ii++) { dsMed[ii] = dsData[iStepLoc / 2] + dStep * ii. sort. iType + 3).c2 = 3. or 1 if a large deviation is found // static int convert_regular_check_data(int iType.sort. iSize = dsData. wks.SetSize(iStepNum). ii++) . int iStepNum. // This function checks all data points to find deviations. if(iType == 0) { sort. // If a deviation is larger than 1/4 th of step size. // Replace deviated values with median value for that group. // Sort worksheet wrt X/Y. int iStepLoc. the data is // rejected. // If the deviation in any point is larger than 1/4th the step size.GetSize(). iType). int ii.cname2$ = "A: A".r1 = 1.wks(). sort. reject data and return for(ii = 0. sort. using sort = LabTalk. // sort only first 3 cols // Build list of median values for X/Y groups by taking medain value of first // group and then using the X/Y step value to compute the rest dsMed. ii < iStepNum. } sort. iSize. sort. sort.cname2$ = "A: B".

Column &col) . int nDesiredColumnPosition. } } } */ ///End return 0. double dev = fabs(dsData[id] . If >= 0. OC_CONVERT_MATRIX_FUNCTION_MOVE_TO_VC_LEVEL_WITH_PREFIX_OCMATH ///---Dancie 9/9/04 REPLACE_DATASET_WITH_VECTORBASE //////////////////////////////////////////////////////////////////////////////////// // These functions find min and max values of a vector. and are used // in random_gridding_nag function. return max(ds). Paramaters: wks = the worksheet to add the column nDesiredColumnPosition = the desired column position (0 offset).25 * dStep) return 1. return min(ds). col= [out] the added column Returns: the actual index of the added column. jj < iStepLoc. which can be different from nDesiredColumnPosition even if nDesiredColumnPosition >= 0. dsData[id] = dsMed[ii]. but the current total number of columns is less than nDesiredColumnPosition. the column will also be appended. */ int wks_insert_column(Worksheet &wks. the column will be appended. } static double max(vector& vec) { Dataset ds(vec). If less than 0. if(dev >= 0. // Should be replaced by methods in vector class when that is added // /* static double min(vector& vec) { Dataset ds(vec). jj++) { int id = ii * iStepLoc + jj.{ for(jj = 0. } */ ///--/** It appends or inserts a column to worksheet at a desired position.dsMed[ii]).

strSheet). return okutil_make_book_sheet_string(strBook. break down and return book and sheet bool get_book_sheet_names(const string& strBookSheet.AddCol(). . if(strBookSheet[0] != '[') return false. return colindex.Columns(colindex). strColNameCreated). NULL.Find(']'. string& strSheet) { /// ML 1/13/2006 XVARIABLEBASE_TO_VC /* if(strBookSheet. 0.Columns(nDesiredColumnPosition). if (nCountColumns < nDesiredColumnPosition) nDesiredColumnPosition = nCountColumns. //return str. const string& strSheet) { /// ML 1/13/2006 XVARIABLEBASE_TO_VC //string str. strSheet).InsertCol(nDesiredColumnPosition. // Insert column: //wks.GetLength() < 4) return false. int nClosing = strBookSheet. col = wks. string& strBook. TRUE). strBook.{ int colindex = -1. nDesiredColumnPosition. if (0 <= nDesiredColumnPosition) // if not appending { int nCountColumns = wks. 1). wks. col = wks. if yes. //str. colindex = nDesiredColumnPosition. nDesiredColumnPosition.CPY 3/4/04 v7.DeleteRange(-1.Format("[%s]%s". string strColNameCreated.5831 QA70-6082 GET_SELECTION_AS_RANGE_STRS // return string in the form of [Data1]Sheet1 string make_book_sheet_name(const string& strBook. /// end XVARIABLEBASE_TO_VC } // check given strBookSheet to see if in the form of [Data1]Sheet1. //--.GetNumCols(). } else { } } // Append: colindex = wks.

if(nClosing < 3) return false; strBook = strBookSheet.Mid(1, nClosing - 1); strSheet = strBookSheet.Mid(nClosing + 1); return true; */ return okutil_get_book_sheet_names(strBookSheet, &strBook, &strSheet); /// end XVARIABLEBASE_TO_VC

} string wks_get_book_sheet_name(const Datasheet& ds) { string str; if(!ds) return str;

/// ML 1/13/2006 XVARIABLEBASE_TO_VC /* Page wp; ds.GetParent(wp); string strBook = wp.GetName(); string strSheet = ds.GetName(); return make_book_sheet_name(strBook, strSheet); */ str = ds.m_strBookSheet; return str; /// end XVARIABLEBASE_TO_VC } bool wks_from_book_sheet_name(Worksheet& wks, const string& strBookSheet) { string strBook,strSheet; if(!get_book_sheet_names(strBookSheet, strBook, strSheet)) return false; WorksheetPage wp(strBook); if(!wp) return false; wks = wp.Layers(strSheet); return wks.IsValid()? true:false;

} //---

///--- CPY 3/14/04 QA70-6036 v7.5837b BASIC_SMOOTHING_ROUTINES bool curve_smooth_adjave(curvebase& cuv, int leftpts) { vector vy, vt; vy = cuv; vt = vy; ///Mouqx/Leo 2005-9-26 QA70-8110 TO_CENTRALIZE_ERROR_CODES_IN_OCMATH

}

//bool bRet = ocmath_adjave_smooth(vy, vt, vy.GetSize(), leftpts); //if(bRet) int nRet = ocmath_adjave_smooth(vy, vt, vy.GetSize(), leftpts); if (nRet == OE_NOERROR ) { cuv = vt; return true; } return false;

// Savitzky-Golay smoothing of the curve's Y data in place: polynomial degree nDeg,
// window of leftpts points on each side. Returns true on success.
bool curve_smooth_sg(curvebase& cuv, int leftpts, int nDeg) // = 2
{
	vector vy, vt;
	vy = cuv;
	vt = vy;
	///Sandy 2006-8-7 CHANGE_TO_USE_STANDARD_ERROR_CODE
	//bool bRet = ocmath_savitsky_golay(vy, vt, vy.GetSize(), leftpts, leftpts, nDeg);
	int nRet = ocmath_savitsky_golay(vy, vt, vy.GetSize(), leftpts, leftpts, nDeg);
	if(nRet == OE_NOERROR)
	{
		cuv = vt; // copy smoothed result back into the curve's Y dataset
		return true;
	}
	return false;
}
///---
///--- SDB 4/14/04 QA70-6035 v7.5837 CALCULUS_ROUTINES
// Replace the curve's Y data with its numerical derivative dY/dX, using the
// curve's attached X dataset. Returns true on success.
bool curve_derivative(curvebase& cuv)
{
	Dataset dsX;
	cuv.AttachX(dsX);
	vector vx, vy;
	vx=dsX;
	vy = cuv;
	///Leo 2005-9-26 QA70-8110 TO_CENTRALIZE_ERROR_CODES_IN_OCMATH
	//bool bRet = ocmath_derivative(vx, vy, vy.GetSize());
	//if(bRet)
	int nRet = ocmath_derivative(vx, vy, vy.GetSize());
	if (nRet == OE_NOERROR)
	{
		cuv = vy; // copy derivative back into the curve's Y dataset
		return true;
	}
	return false;
}
///---
/// Leo 2005-12-16 MOVED_PEAK_AND_BASLINE_FUNCS_TO_PFM_UTILS

//----------- CPY 4/3/05 SIMPLE_GRAPH_ANSLYSIS_FUNCTIONS //--- Iris 9/17/05 move to Analysis_utils.h //#define STR_COL_DESIGNATION_ATTRIB "ColDesignation" //#define STR_COL_WIDTH_ATTRIB "ColWidth" //--// assume tr as no branch, only leaf nodes int out_tree_to_wks(const TreeNode& tr, Worksheet& wks) { ///------ Fisher 2008-9-24 WHEN_THE_FIRST_COLUMN_HAS_NO_DATA_THE_WKS_SIZE_WILL_NOT_INCREASE //Column c1 = wks.Columns(0); //if(c1) // nRow = c1.i2 + 1; int nR1, nR2 = -1; wks.GetRange(nR1, nR2, 0, -1, GDR_NO_ADJUST, GDR_SKIP_MISSING_BLANK); int nRow = nR2 + 1; ///------ End WHEN_THE_FIRST_COLUMN_HAS_NO_DATA_THE_WKS_SIZE_WILL_NOT_INCREASE bool bUnitsNeeded = false; //out_int("nRow = ", nRow); foreach(TreeNode trN in tr.Children) { //--- CPY 9/23/08 CURVE_INTEG_TOOL_OUTPUT_TO_WKS_USING_STANDARD_RERPOT_TREE if(trN.Show == false) continue; //--//---- CPY 11/9/06 PCLAMP_PLOT_BUTTON_TO_ADD_TO_WKS //Column cc = wks.Columns(trN.tagName); Column cc; string strLongName; if(trN.GetAttribute(STR_LABEL_ATTRIB, strLongName) && strLongName.GetLength() > 0) cc = wks.FindCol(strLongName, 0, false, true, -1, false); else cc = wks.Columns(trN.tagName); //---if(!cc) { int nCol = wks.AddCol(trN.tagName); cc.Attach(wks, nCol); int nDesignation = OKDATAOBJ_DESIGNATION_NONE; trN.GetAttribute(STR_COL_DESIGNATION_ATTRIB, nDesignation); cc.SetType(nDesignation); int nWidth = 0; trN.GetAttribute(STR_COL_WIDTH_ATTRIB, nWidth); if(nWidth != 0)

} //---} int nn = cc. vector<uint>& vecFCount. vy.SetWidth(nWidth). //---. if(nBinRule == BIN_RULE_STURGES) // do this here instead of calling ocmath_bin_width as inside it will get min max again.CPY 11/9/06 PCLAMP_PLOT_BUTTON_TO_ADD_TO_WKS { // we assume wks was prepared with just showing long name.SetUnits(str).GetIndex(). nn. nColWidth). } bool frequency_count(const vector& vy. double& dbinW. string str.dVal).CPY 11/9/06 PCLAMP_PLOT_BUTTON_TO_ADD_TO_WKS if(strLongName. so if units needed. str) && str.CheckAddLabelByType(RCLT_UNIT). bUnitsNeeded = true. a waste nBins = 1. if(nColWidth != 0) tr. else wks.SetCell(nRow. int nBinRule) { if(nBinRule < BIN_RULE_FREEDMAN_DIACONIS || nBinRule > BIN_RULE_STURGES) return false. void tree_set_add_col_info(TreeNode& tr.SetLongName(strLongName).TypeID == TNVAL_TYPE_CSTRING || trN. } } if(bUnitsNeeded)//---. int nColWidth)// = 0) { if(nDesignation >= 0) tr. } } return nRow.cc. we will need to show it too wks.5 + log(vy.GetSize())/log(2). int nBins. if(nn >=0) { if(trN.IsEmpty()) wks.strVal. nDesignation). trN. int nDesignation. double& min. trN. if(trN.SetAttribute(STR_COL_DESIGNATION_ATTRIB.SetCell(nRow.GetAttribute("Units".SetAttribute(STR_COL_WIDTH_ATTRIB. nn.GetLength() > 0) cc.GetMinMax(min. double& max.GetLength() > 0) { cc. .strVal). max).

vector vPercResult. min. &nmax). int N.sum. dMean. nBinRule). nmax. } int nNumBins = RoundLimits(&min. int* pmax) { string strLabel = _L("Descriptive Statistics").SetSize(nNumBins). nBins).dVal = dsr. 9). vPercResult. NULL. &max.Sum. vv. ///end CHANGE_DESC_STAT_FUNC_FOR_EFFICIENCY int nInterpolate = Project. should call DescStats operation ///Echo 9/25/06 CHANGE_DESC_STAT_FUNC_FOR_EFFICIENCY //tr. vecFCount.N. vy. tr.mean. return (nRet == OE_NOERROR). vecFCount. 3 = Median + P25/P75. vector vPerc = {1.SetAttribute(STR_LABEL_ATTRIB. //tr. } // result into tr // nPercentiles = 0. dbinW).GetSize(). dbinW). none. // temp code. if(pmax) *pmax = nmax. int nErr=ocmath_percentiles(vv.Settings.min)/dbinW). vv. 1=median.GetSize().GetSize(). int* pmin. nInterpolate). tree_set_add_col_info(tr. //tr.GetMinMax(min.50. TreeNode& tr. max.GetSize(). vPerc. //ocmath_desc_stats(vv. double min. ocmath_basic_summary_stats(vv.GetSize()). &dMean. &nmin. &dSD. vPercResult.GetSize(). vecFCount. int nRet = ocmath_d_frequency_count(vy. &N. vecFCount.Mean.25.GetSize().5 + fabs((max . vy.GetSize(). int nPercentiles. &dsr). &dSum).dVal = dsr. nBins = 0.nVal = dsr. vPerc.75. 7 = Median + P25/P75 + P5/P95 + P1/P99 void vector_basic_stats(const vector& vv. OKDATAOBJ_DESIGNATION_Y.GetPercentileInterpolateMethod(). double dSum. NULL.else { dbinW = ocmath_bin_width(vy.mean.5. if(pmin) *pmin = nmin. ///Echo 9/25/06 CHANGE_DESC_STAT_FUNC_FOR_EFFICIENCY //DescStatResults dsr.GetSize(). &dbinW.SetSize(vPerc.GetSize(). strLabel). vv. vecFCount. max.N. dSD. int nmin.95. // Call ocmath function to compute frequency count ///Mouqx/Leo 2005-9-26 QA70-8110 TO_CENTRALIZE_ERROR_CODES_IN_OCMATH //return ocmath_d_frequency_count(vy. 5 = Median + P25/P75 + P5/P95. . vv. vy. min.99}.

vx.x2. &nmin.mean. 1.x1. tr.P1.Data. if(dr. tr.dVal = dSD.min. &vv.Data. tr. &dwPlotID. int nmin.median. } } bool curve_stats(const DataRange& dr.dVal = vPercResult[5]. } if(nPercentiles > 6) { tr. OKDATAOBJ_DESIGNATION_Y. vector_basic_stats(vv.dVal = vx[nmax].N. tr. NULL. &nmax). nmax. if(nPercentiles > 2) { tr. 9). tree_set_add_col_info(tr.P75. OKDATAOBJ_DESIGNATION_ERROR. tr.dVal = dMean. tree_set_add_col_info(tr. tr.dVal = vPercResult[0]. tr. 9).dVal = dSum.max. ///end CHANGE_DESC_STAT_FUNC_FOR_EFFICIENCY if(nPercentiles > 0) tr. tree_set_add_col_info(tr.//tr.dVal = vx[nmin]. tr.SD. tr. TreeNode& tr) { DWORD dwPlotID. // 1 = median tr. 0. 24).xmin.dVal = vPercResult[3]. &vx) < 0) return false. } if(nPercentiles > 4) { tr.sum.GetMinMax(x1. return true.P25. 9). tr.nVal = N.dVal = vPercResult[2]. tr.strVal = dr. x2.dVal = min.sd. x2).dVal = x2-x1. vector vv. tree_set_add_col_info(tr.dVal = x2.dVal = max. tr.P99.mean.GetDescription().GetData(DRR_GET_MISSING | DRR_GET_DEPENDENT | DRR_NO_FACTORS.dx.TypeID = TNVAL_TYPE_CSTRING.sd. tr. tr. tr.dVal = vPercResult[1].sd.dVal = x1. vx.xmax. } . OKDATAOBJ_DESIGNATION_X.dVal = vPercResult[6].dVal = vPercResult[4].dVal = dsr.P95. double x1.sd. OKDATAOBJ_DESIGNATION_ERROR.Data. tr.P5.

i1. wks.i1.Create(). DWORD dwCntrl) //XIC_NEAREST_ANY { if(!dp) return -1. } int out_tree_to_wks(TreeNode& tr. int nn = dp. wks. int ii. Tree tr.GetDataRange(dr. } return out_tree_to_wks(tr. } // See COKPlotObj::GetNearestDataPt(). DataPlot& dp. wks. dwCntrl).Rename(strWksName).SetSize(-1. } return true. "CurveStat"). if(!curve_stats(dr. . tr)) return false. bool bAppendReportSheet. otherwise find on the left int get_dataplot_index(DataPlot& dp. double x. Worksheet wks(strWksName). ii. 0). wks. double x1. double x2) { DataRange dr. // if bRight. } else {// dump to script window out_tree(tr).HideLabelByType(RCLT_COMMENT). if(bAppendReportSheet) { out_tree_to_wks(tr. int i2. then find data on the right.static bool _curve_stats(int i1. wks.GetPage().HideLabelByType(RCLT_UNIT). if(!wks) { wks. i2)) return false. wks). LPCSTR lpcszBookName) { string strWksName = lpcszBookName.XIndex(x. if(nn == XIR_LEFT_OF_RANGE) ii = dp.IsEmpty()) strWksName = "TreeOutput".HideLabelByType(RCLT_PARAM).CheckAddLabelByType(RCLT_LONG_NAME). wks. if(!dp. if(strWksName.

double xx = NANUM. int nIndex = -1.CPY 8/14/05 LAYER_DATA_READING_VERT_LINE bool find_nearest_xy_at_x(double x.GetDescription(). &yy)) nCount++. return ii. ii++) { DWORD dwPlotUID = 0. ii < nNumData.i2. if(nNumData < 1) return false. } .Add(nIndex).Add(xx). vector& vy) { DWORD dwRules = DRR_GET_DEPENDENT | DRR_NO_FACTORS | DRR_RAW_ERR_WEIGHT.GetDataRange(drPlot). vsNames. vnIndices. } } } if(drPlot) str = drPlot. const DataRange& dr. &vYtemp) && dwPlotUID) { DataPlot dp. &xx. if(dp) { dp. vector<string>& vsNames. DataRange drPlot. vy. int nNumData = dr. string str = "##". else if(nn == XIR_ERROR) return -1.GetDataPoint(nIndex. vector<int>& vnIndices. ii. yy = NANUM. dp = Project. &dwPlotUID.Add(str).GetData(dwRules.// need to find out how to remove this if(dr.else if(nn == XIR_RIGHT_OF_RANGE) ii = dp. vector& vx. int nCount = 0. vx. vector vYtemp. } //----. NULL. nIndex) != XIR_WITHIN_RANGE) nIndex = -1.GetNumData(dwRules). else { if(dp. for (int ii = 0. if(dp.XIndex(x.GetObject(dwPlotUID).Add(yy).

return true. //printf("%s: [%d. 0. nRight. DataRange& dr. if(pwks) *pwks = wks. //in logical unit.c2.WorldToPage(nLeft.nTop). } //----. c1. &dwPlotID. vy. c1=%d. r1. xmax. c2). c1.GetName(). xmin.GetMinMax(ymin.%d]. c2.c1. ymax). int* pi1. ymin. bool bData) { Layer lay. int nHeight = abs(nBottom . r2. &vx) < 0) return false. dr.GetPage(). if(dr. nBottom. &dwPlotID. vector& vx. NULL. nTop.GetMinMax(xmin. vector& vy. vx. wks.end LAYER_DATA_READING_VERT_LINE bool get_xy_data(const DataRange& dr. 0. xmax. double xmin. vector vx. like about 10 pixcel for 600 dpi int nExtra = nHeight * 0. r1.GetData(dwRule | DRR_GET_DEPENDENT | DRR_NO_FACTORS. Worksheet* pwks) { DWORD dwPlotID. if(nHeight < 100) nHeight = 100. &vy.GetParent(lay). NULL. } // bData = true to be above/below data Y range // bData = false to span layer frame // only stretch vertically bool grobj_stretch_vertical(GraphObject& gr. wks).return nCount >0 ? true:false. if(bData) { lay. ymax. c2=%d\n". ymax). lay. if(pi2) *pi2 = r2. gr.GetRange(0. int r1.GetData(DRR_GET_DEPENDENT | DRR_NO_FACTORS. if(dr.r2. . &vy.1 + 1. if(pi1) *pi1 = r1. int nLeft.WorldToPage(nRight. DWORD dwPlotID. r2.5. int* pi2. ymin). &vx) < 0) return false. nTop. Worksheet wks. bool bGetMissingValues. xmax). vy. nBottom. DWORD dwRule = bGetMissingValues? DRR_GET_MISSING : 0.

From).Root. xmin. lay.Pattern.BehindData.nVal = 0. trn.WorldToPage(nLeft.Style. // rect order line width tr.Root).nVal = nPatternStyle.Width.Root.To). int nPatternStyle. true. nTop.Height = nHeight + 2*nExtra.Y). return true. gr.Pattern.Pattern. TreeNode trn = tr. ///Sandy 2007-8-14 add error checking //gr. int nPatternColor.dVal = nBorderWidth. true).Fill. double nPatternWidth .GetFormat(FPB_ALL. //end . if(nErr == 0) { gr. int nFillColor. FOB_ALL. } else return false.} else { if(nExtra > 120) nExtra = 120.Border. xmax. true. or NONE in dialog trn. true).Width. gr.nVal = nPatternColor.nVal = bBehindData? 1:0. true. return false.Top = nTop . int nErr = gr.Height = abs(nBottom .ApplyFormat(tr. gr. sc. nErr).nTop). // 6=yellow trn.ApplyFormat(tr. // -4 corresponds to transparent. // get relative = true to get only the relavent tree nodes tr = gr. trn.dVal = nPatternWidth.// independent color for fill pattern lines trn.UpdateThemeIDs(tr. } return true. sc. } bool shape_tool_init(GraphObject& gr. //return true. trn. lay.WorldToPage(nRight. bool bBehindData) { Tree tr. } out_int("err = ". GraphLayer glay = lay.nVal = nFillColor.Root.nExtra.Pattern.UseBorderColor. gr.Color. double nBorderWidth.Top = nTop. nBottom. if(glay) { Scale sc(glay. true). // fill pattern line width trn = tr.PatternColor.

vector<int> vnIndices. { DataPlot dp. vni2. . DWORD dwCntrl. int i1 = get_dataplot_index(dp. vector<int> vni1. bool bUpdateToActive) { GraphLayer gl. bAppendReportSheet. false will dump result to script window // curvebase = [input] the curve to do the calculation from. i2. (dwCntrl & CUVS_X_MARKERS_NEAREST)? XIC_NEAREST_ANY : XIC_NEAREST_RIGHT). x1. int& i2. (dwCntrl & CUVS_X_MARKERS_NEAREST)? XIC_NEAREST_ANY : XIC_NEAREST_LEFT). it).DataPlots().} // compute descriptive statistics on a curve between two X markers // bAppendReportSheet = [input] true will append result into a book called "CurveStats". int& i1. if NULL. gl. x2. DataPlot& dataplot)// = NULL). } else dp = dataplot. return _curve_stats(i1. DataPlot& dp.Plot"). double x2. &dd).GetParent(gl). int i2 = get_dataplot_index(dp. i2. if(i1 > i2) { int it. will use the active curve in the active layer // Return: // false if no curve found bool curve_stats(double x1. dp. if(!dp) return false. x1. int nActive = dd -1.LT_execute("x=Layer. double dd. grTool. SWAP(i1. dp = gl. if(i1<0 || i2<0) return false. } bool bAppendReportSheet = (dwCntrl & CUVS_DUMP_SCRIPTWIN)? false:true. if(dataplot == NULL) { GraphLayer gl = Project.ActiveLayer(). } bool find_intersect_dataplot(GraphObject& grTool.LT_get_var("x". x2).

if(ii == nActive) { bActiveFound = true.XIndex(x2. vni1.GetMinMax(x1. int n1. double dXMin = 0. x2. int nCount = dpi. vy). if(nCount > 0) { int nIndexMin =0. vector vy. vx. x2). n1). int nIndexMax = 0. } } */ vector vx. vx. dXMax. vector<int> vIndeces. n2. vx. break.GetMinMax(dXMin.Add(n1).XIndex(x1. .Add(ii). n2). vni2. vx. vy. if(ii == nActive) { bActiveFound = true. foreach(DataPlot dpi in gl. dpi.Add(vIndeces[nIndexMin]). vni1.Add(vIndeces[nIndexMax]).Add(ii). break.GetDataPoints(grTool.Add(n2).int ii = 0. dpi. int nCount = dpi. bool bActiveFound = false. &vIndeces). vni2.DataPlots) { /// YuI 09/24/08 QA70-12265 P3 DataPlot::GetDataPoints_TO_RETURN_VECTOR_OF_INDECES /* vector vx. vnIndices. &nIndexMax). } } /// end DataPlot::GetDataPoints_TO_RETURN_VECTOR_OF_INDECES ii++. vy. if(nCount > 0) { double x1. &nIndexMin. double dXMax = 0.GetDataPoints(grTool. vnIndices.

1.0).// convert to LT index gl. double y2) { if(is_equal(x1. if(nSize < 2) return false.SetSize(nSize). const vector& vy. } } int nSelPlot = vnIndices[nn]. } bool interpolate_linear(const vector& vx. vy. double x1. vy = (vx . } else if(!bActiveFound) { if(bUpdateToActive) { LT_set_var("x". return true.Plot = x").GetSize(). vnIndices[nn] + 1.GetSize(). vy += y1.GetSize() == 0) { out_str("not intersecting any data plot"). return true. IntegrationResult& ir) ///end IMPROVE_CALCULATION_CENTROID_METHOD { int nSize = vy. . vector& vy. which is always the last nn = vnIndices.x2)) return false.} if(vnIndices. double y1. ocmath_IntegResult& ir) bool peak_integrate(const vector& vx. const vector& vbln.GetSize() > 1 && bActiveFound) { // prefer active. return false.x1) * (y2-y1)/(x2-x1).GetSize() . if(nSize != vx. const vector& vbln. i2 = vni2[nn]. const vector& vy.LT_execute("Layer. i1 = vni1[nn].DataPlots(nSelPlot). } ///Arvin 03/19/07 IMPROVE_CALCULATION_CENTROID_METHOD //bool peak_integrate(const vector& vx. double x2. int nSize = vx. } int nn = 0. vector vTemp. dp = gl.GetSize()) return false. if(vnIndices.

if(tr. int nData. "Error Bar / Weight"}. ///// Iris 02/04/06 QA70-8480 UPDATE_DATA_RANGE_MANIPULATION ////vector<string> vsLabels = {"Y Data Range". //vector<string> vsLabels = {"Y Data Range". vector vIntegral.end SIMPLE_GRAPH_ANSLYSIS_FUNCTIONS // nData = 0 will assume InputData is already a subbranch static TreeNode _tree_check_add_input_data_branch(TreeNode& trInputData. } //---------. "Y". */ LPCSTR lpcszDefault = NULL.vTemp = vy .Add(_L("X Data Range")). vTemp. vector<string> vsLabels."Y Data Range". vsLabels. "X". "Error Bar Data Range"}. TRGP_DATA_RANGE."X Data Range". 0. pDPS->szYErrRange). "ED"}. ///Leo 2005-9-26 QA70-8110 TO_CENTRALIZE_ERROR_CODES_IN_OCMATH //bool bRet = ocmath_integrate(vx. vTemp. vsLabels. . vsLabels. 0.GetSize()-1. 0.GetSize()-1.vbln. tree_check_add_input_data_branch(trInputData. pDPS->szIndepRange).SetSize(nSize). "ED"}. uint uID) { /// Iris 4/17/06 KEEP_XY_ORDER_FOR_INPUT_DATA_RANGE //vector<string> vsKeys = {"Y"."X Data Range". nData.SetAttribute(STR_PLOTOBJ_UID_ATTRIB.Add(_L("Error Bar / Weight")). TRGP_DATA_RANGE. vsKeys. vsLabels. nData. &ir. vIntegral.Add(_L("Y Data Range")). TRGP_DATA_RANGE. nData. 2.IsValid() && uID) tr.5891 QA70-6556 DATA_RANGE_MARKERS /* TreeNode tr = tree_check_add_input_data_branch(trInputData. DataPlotStrings* pDPS. 1. vTemp. /////End UPDATE_DATA_RANGE_MANIPULATION vector<string> vsKeys = {"X". vsKeys. vsLabels. vsKeys. vsLabels. vTemp. "Error Bar / Weight"}. /// Max 07-11-05 CORRECT_LOCALIZATION //vector<string> vsLabels = {"X Data Range". /// END CORRECT_LOCALIZATION /// End KEEP_XY_ORDER_FOR_INPUT_DATA_RANGE /// YuI 6/16/04 v7. if( pDPS ) lpcszDefault = pDPS->szIndepRange. int nRet = ocmath_integrate(vx. pDPS->szDepRange). tree_check_add_input_data_branch(trInputData. return (nRet == OE_NOERROR). ftoa(uID)). //return bRet. &ir. vIntegral). vIntegral).

uID). ftoa(uID)). set_plot_uid_to_tree(tr. nData. if( pDPS ) lpcszDefault = pDPS->szDepRange. &dps.GetUID(true). if( pDPS ) lpcszDefault = pDPS->szYErrRange. LPCSTR lpcszDefaultVal) { /// ML 1/13/2006 XVARIABLEBASE_TO_VC /* ASSERT(vsKeys. vsLabels. const DataPlot& dp.GetPlotType(&dps). TRGP_DATA_RANGE. nData. vsLabels. tr = _tree_check_add_input_data_branch(trInputData. 2. // not a valid data plot with a valid plot type uint uID = dp. int nPlotType = dp. TreeNode tree_check_add_input_data_branch(TreeNode& trInputData. nData.GetSize()).IsValid() && uID) //tr. lpcszDefault). if(nPlotType <= 0) return false. vsKeys. tree_check_add_input_data_branch(trInputData. vsKeys. if(tr. vector<string>& vsLabels. lpcszDefault). if(!dp) return tr. 1. TRGP_DATA_RANGE. uID). TRGP_DATA_RANGE. DataPlotStrings dps. return tr.SetAttribute(STR_PLOTOBJ_UID_ATTRIB. int nType. 0. TreeNode tr = trInputData. int nBranchType. vsLabels. vsKeys. int nData)// = 1) { TreeNode tr. const vector<string>& vsKeys. TreeNode tr = tree_check_add_input_data_branch(trInputData.tree_check_add_input_data_branch(trInputData. /// end DATA_RANGE_MARKERS } return tr. } /// YuI 8/13/04 QA70-4387 NLFITTER_OPERATION_IMPLEMENTATION TreeNode tree_check_add_input_data_branch(TreeNode& trInputData.GetSize() == vsLabels. lpcszDefault). if(nData > 0) . int nData. nData.

if( trInputData.strVal = lpcszDefaultVal.GetAttribute(STR_DATARANGE_UID_ATTRIB. STR_LABEL_ATTRIB.Format("%s%d". ii < vsKeys. nData. uint& uidDataRange) { /// ML 1/13/2006 XVARIABLEBASE_TO_VC /* string strTemp. dr. STR_LABEL_ATTRIB. &trInputData. return tr. strName.{ string strName. nType. uidDataRange). strName). if( !dr ) error_report("DataRange UID stored in InputData is bad"). } for(int ii = 0. /// end XVARIABLEBASE_TO_VC } /// YuI 11/11/04 DataRange get_data_range_from_input_data(TreeNode& trInputData. &vsKeys. } return dr. tr = tree_check_get_node(trInputData. PREFIX_FOR_DATA_RANGE_DATA. */ DataRange dr. ii++) { TreeNode trTemp = tree_check_get_node(tr. nBranchType.GetSize(). } } return tr. nData). /// end XVARIABLEBASE_TO_VC . octree_tree_check_add_input_data_branch(&tr. &vsLabels. ASSERT(nType >= 0). if(lpcszDefaultVal && (nType < 0 || nType == ii)) { trTemp. nBranchType.InitFromInputWithUID(trInputData. vsKeys[ii]. strTemp) ) uidDataRange = atoi(strTemp). strName.GetObject(uidDataRange). vsLabels[ii]). return dr. */ TreeNode tr. if(uidDataRange) { dr = (DataRange)Project. TRGP_INTERACTIVE. DataRange dr. lpcszDefaultVal).

} /// end YuI /// ML 10/20/2005 ANOVA_INPUT_DATA_TO_DATARANGE //bool prepare_input_data_for_operation(TreeNode& trInputData, bool bAddToProject) bool prepare_input_data_for_operation(TreeNode& trInputData, bool bAddToProject, int nDRTreeOption) /// end ANOVA_INPUT_DATA_TO_DATARANGE { DataRange dr; uint uidDataRange; dr = get_data_range_from_input_data(trInputData, uidDataRange); if( dr ) return false; // already created and exists no need to create new one // only if data range is not created yet need to create new one /// ML 10/20/2005 ANOVA_INPUT_DATA_TO_DATARANGE //dr.Create(trInputData, FALSE); dr.Create(trInputData, nDRTreeOption); /// end ANOVA_INPUT_DATA_TO_DATARANGE trInputData.SetAttribute(STR_DATARANGE_UID_ATTRIB, (int)dr.GetUID(true)); if(bAddToProject) { Project.AddDataRange(dr); dr.Invalidate(); } return true; } string make_range_string(const Datasheet& ds, LPCSTR lpcstrColName1, LPCSTR lpcstrColName2, int iBeginRow, int iEndRow) { /// ML 1/13/2006 XVARIABLEBASE_TO_VC //string strBookSheetName = wks_get_book_sheet_name(ds); //string strRange; ///* //if(iBeginRow > 0 && iEndRow > 0) // strExcelRange = strBookSheetName + "!$" + lpcstrColName1 + "$" + iBeginRow + ":$" + lpcstrColName2 + "$" + iEndRow; //else if(lpcstrColName2 == NULL) // strExcelRange = strBookSheetName + "!$" + lpcstrColName1 + ":$" + lpcstrColName1; //else // strExcelRange = strBookSheetName + "!$" + lpcstrColName1 + ":$" + lpcstrColName2; //return strExcelRange; //*/ //okutil_create_range_string(&strRange, lpcstrColName1, iBeginRow, lpcstrColName2, iEndRow);

//strBookSheetName += "!"; //return strBookSheetName + strRange; return ds.MakeRangeString(lpcstrColName1, lpcstrColName2, iBeginRow, iEndRow); /// end XVARIABLEBASE_TO_VC

}

/// ML 1/13/2006 XVARIABLEBASE_TO_VC
/*
string make_range_string(const Column& col, int iBeginRow, int iEndRow)
{
	string str;
	Worksheet wks;
	if(col.GetParent(wks))
	{
		return make_range_string(wks, col.GetName(), NULL, iBeginRow, iEndRow);
	}
	return str;
}
*/
/// end XVARIABLEBASE_TO_VC

// Fill one X/Y(/Z)/error data-range branch of trDataRange from named columns
// of the given worksheet.
// trDataRange = [in/out] tree receiving the range branch
// wks         = [input] source worksheet; must be valid
// lpcszX      = [input] X column name
// lpcszY      = [input] Y column name
// lpcszZ      = [input] optional Z column name; when non-NULL the third slot
//               becomes "Z Data Range" instead of "Error Bar / Weight"
// lpcszErr    = [input] optional error-bar/weight column name; skipped if
//               NULL or empty
// bSingle     = [input] true for a single (index 0) branch, false for a
//               numbered (index 1) branch
// Returns false if the worksheet is invalid, true otherwise.
bool construct_one_data_range(TreeNode& trDataRange, const Worksheet& wks, LPCSTR lpcszX, LPCSTR lpcszY, LPCSTR lpcszZ, LPCSTR lpcszErr, bool bSingle)
{
	if(!wks.IsValid())
		return false;

	vector<string> vsKeys = {"X", "Y", "ED"};
	/// Iris 02/04/06 QA70-8480 UPDATE_DATA_RANGE_MANIPULATION
	//vector<string> vsLabels = {"X Data Range", "Y Data Range", "Error Bar Data Range"};
	/// Max 07-11-05 CORRECT_LOCALIZATION
	//vector<string> vsLabels = {"X Data Range", "Y Data Range", "Error Bar / Weight"};
	vector<string> vsLabels;
	vsLabels.Add(_L("X Data Range"));
	vsLabels.Add(_L("Y Data Range"));
	vsLabels.Add(_L("Error Bar / Weight"));
	/// END CORRECT_LOCALIZATION
	///End UPDATE_DATA_RANGE_MANIPULATION
	// a Z column replaces the error-bar/weight slot
	if(lpcszZ)
	{
		vsKeys[2] = "Z";
		vsLabels[2] = _L("Z Data Range");
	}
	TreeNode tr = tree_check_add_input_data_branch(trDataRange, vsKeys, vsLabels, TRGP_DATA_RANGE, bSingle? 0:1);
	// NOTE(review): strBookSheetName appears unused below -- confirm before removing
	string strBookSheetName = wks_get_book_sheet_name(wks);
	tr.X.strVal = make_range_string(wks, lpcszX);
	tr.Y.strVal = make_range_string(wks, lpcszY);
	if(lpcszZ)
		tr.Z.strVal = make_range_string(wks, lpcszZ);
	if(lpcszErr && lstrlen(lpcszErr) > 0)
		tr.ED.strVal = make_range_string(wks, lpcszErr);

	return true;
}

// construct tree and create datarange for operation, this tree can be used in operation as InputData branch
// trInputData = [in/out] tree receiving the range branch
// wks         = [input] source worksheet
// nColX/nColY = [input] X and Y column indices; must be below the column count
// nColYErr    = [input] Y-error column index; values <= 0 mean "no error column"
// Returns false when any index is out of range, otherwise the result of
// construct_one_data_range.
// NOTE(review): negative nColX/nColY are not rejected before the Columns()
// calls below -- confirm callers never pass them.
bool make_input_data(TreeNode& trInputData, const Worksheet& wks, int nColX, int nColY, int nColYErr)
{
	string strX, strY, strYErr;
	int nCols = wks.GetNumCols();
	if(nColY >= nCols || nColX >= nCols || nColYErr >= nCols)
		return false;
	strX = wks.Columns(nColX).GetName();
	strY = wks.Columns(nColY).GetName();
	// presumably column 0 can never be the error column; <= 0 leaves strYErr empty
	if(nColYErr >0)
		strYErr = wks.Columns(nColYErr).GetName();
	return construct_one_data_range(trInputData, wks, strX, strY, NULL, strYErr, false);
}
///End MAKE_INPUT_DATA_FROM_WORKSHEET

//----- CPY 8/14/05 LAYER_DATA_READING_VERT_LINE
// construct a tree node that represents all the data plots in the given graph
// tr = [in/out] tree receiving one branch per data plot (1-based index)
// gl = [input] graph layer whose plots are enumerated
bool construct_data_range_all_plots(TreeNode& tr, GraphLayer gl)
{
	int nPlots = gl.DataPlots.Count();
	for(int ii=0; ii< nPlots; ii++)
	{
		DataPlot dp = gl.DataPlots(ii);
		tree_check_add_input_data_branch(tr, dp, ii+1);
	}
	return true;
}
//----- end LAYER_DATA_READING_VERT_LINE

// Map a data-selection error code to a localized, user-readable message.
string get_err_as_string(int nErrCode)
{
	switch(nErrCode)
	{
	case ERR_DATASEL_NO_DATA_SELECTED:
		return _L("No Data Selected");
	case ERR_DATASEL_MUST_HAVE_Z:
		return _L("You must select at least one Z Column");

vector<string>& vsKeys. vector<string>& vsLabels. } /// RVD 10/25/2005 v8.GetSize() % 2 != 0) return false.Time2.. wks.. if( !wks ) return false. } else { } if( vnSelCols.. case ERR_DATASEL_MUST_HAVE_X: return _L("You must select at least one X Column"). } return _L("Unknown Error"). if( dwDataRule & DRR_GET_STATS_SA_COVAR_LAST_COLS) bIncludeCovar = true.GetSelectedColumns(vnSelCols). .GetSize()-1 ).0323 NON_ACTIVE_SELECTION static Worksheet _get_wks(OriginObject& obj) { return (Worksheet) obj. if( !bIncludeCovar ) { //remove Covariance's key and label vsKeys. DWORD dwDataRule.GetSize()-1 ). Censor2. //the size of selected columns must be even since should be Time1. vector<int> vnSelCols.GetSize() ) return false. } /// end NON_ACTIVE_SELECTION /// Iris 8/29/05 SUPPORT_STATS_INPUT_FORMAT static bool _init_input_data_branch_from_selection_for_survival_analysis(TreeNode& trInputData. OriginObject& obj) { Worksheet wks = _get_wks(obj).RemoveAt( vsLabels.case ERR_DATASEL_MUST_HAVE_Y: return _L("You must select at least one Y Column")..Censor1. bool bIncludeCovar = false. vsLabels.RemoveAt( vsKeys.. if( vnSelCols.GetSize() < vsKeys.

vector<int> vnSelCols.GetName(). OriginObject& obj) { Worksheet wks = _get_wks(obj). strRange). tree_check_add_input_data_branch(trInputData. Column colCovarEnd(wks. if( !wks ) //must select data from wks for ROC return false. nRanges. col.GetName()). strRange). TRGP_DATA_RANGE. nData.GetName(). vnSelCols[nColIndex]). } //to get and init Time and Centor treenode Column col(wks. colCovarBegin.GetSelectedColumns(vnSelCols). nData. NULL). vsLabels. for(int nColIndex=0. ) { for(int nData = 0.GetSize(). wks. nColIndex<vnSelCols. return true. nData++) { //to make the columns that from 2nd to the end column as Convariance data and init treenode if(2 == nData && bIncludeCovar) { Column colCovarBegin(wks. nData<vsKeys. nRanges. if( colCovarBegin && colCovarEnd ) { string strRange = make_range_string(wks. if(col) { string strRange = make_range_string(wks.int nRanges = 1. } return true. vector<string>& vsKeys. vsKeys. vsLabels. vnSelCols[vnSelCols. TRGP_DATA_RANGE. vsKeys. colCovarEnd. } return false. .GetSize(). } nColIndex++. tree_check_add_input_data_branch(trInputData.GetSize()-1]). DWORD dwDataRule. vector<string>& vsLabels. vnSelCols[nColIndex]). } nRanges++. } static bool _init_input_data_branch_from_selection_for_ROC(TreeNode& trInputData.

} else return false. vector<string>& vsLabels. vnSelCols[0] ). vnSelCols[vnSelCols.GetSize()-1] ). TRGP_DATA_RANGE. if( !wks ) { return error_report("The active window is not worksheet.GetSize() < 2 ) { tree_check_add_input_data_branch(trInputData. NULL). 1. 1. TRGP_DATA_RANGE). vsKeys. TRGP_DATA_RANGE. strRange). return true.///Kevin 09/02/05 ADD_FOR_SELECTED_NONE_COLUMN /// Iris 10/20/05 input data need inlcude one data and one state at least //if ( 0 == vnSelCols. DWORD dwDataRule. vsLabels. colState.GetName()).GetSize() ) if ( vnSelCols. OriginObject& obj) { Worksheet wks = _get_wks(obj). return true. colDataTo. tree_check_add_input_data_branch(trInputData. 0.GetName(). vsLabels. must select data from worksheet for multiple regression"). Column colDataTo( wks. } else return false. if( colDataFrom && colDataTo) { string strRange = make_range_string(wks. vnSelCols[vnSelCols. if(colState) { string strRange = make_range_string(wks. colDataFrom. vector<string>& vsKeys. } ///End SUPPORT_STATS_INPUT_FORMAT /// Iris 10/09/05 SUPPORT_AUTO_SELECTION_DATA_FOR_MULTI_REGRESSION static bool _init_input_data_branch_from_selection_one_dep_multi_indep(TreeNode& trInputData. tree_check_add_input_data_branch(trInputData. //get the last column as State data Column colState( wks. vsLabels. . strRange). } ///End ADD_FOR_SELECTED_NONE_COLUMN //get Data from all columns except the last column Column colDataFrom( wks. vsKeys. int* pErrCode.GetName().GetSize()-2] ). vsKeys. 1.

vsKeys. int nYCol=-1. if( nn<0 ) break. nEnd = nn++.GetSize(). strRange). int nErrCode=-1. 1. ii<vnSelCols. strCols = wks.GetName()). nBegin = 0.Find('Y').} /// Iris 10/18/05 UPDATE_MR_DATA_SELECTION_BETTER /* int nn. } strRange = make_range_string(wks.GetColDesignations(). if( 0 == nBegin) nBegin = nn. tree_check_add_input_data_branch(trInputData. vsLabels.GetType(). switch(nColType) { case OKDATAOBJ_DESIGNATION_Y: .GetSelectedColumns(vnSelCols).Columns(nEnd). 1. tree_check_add_input_data_branch(trInputData.GetSize()) { for(int ii=0.GetName()). if(0 != vnSelCols. TRGP_DATA_RANGE. nErrCol=-1. strRange = make_range_string(wks. nRange = 0.Columns(nn). nXColEnd=-1. wks. wks. if( nn>=0 ) { strRange = make_range_string(wks. strRange). vsLabels.Find('M'). nCol). int nColType = col. vsKeys. if( nn<0 ) return error_report("Must has one Y column at least").GetName(). ii++) { int nCol = vnSelCols[ii]. int nEnd. strRange). nRange++.GetName()). Column col(wks. wks. vsLabels. vsKeys. nn = strCols.Columns(nBegin). wks. nn).Columns(nn). } */ vector<int> vnSelCols. TRGP_DATA_RANGE. tree_check_add_input_data_branch(trInputData. string strRange. wks.Find("X". TRGP_DATA_RANGE. nn = strCols. while(1) { nn = strCols. 1. nRange++. nRange++. nXColBegin=-1.

break. vsLabels. tree_check_add_input_data_branch(trInputData. vsKeys. tree_check_add_input_data_branch(trInputData.if(nYCol<0) nYCol = nCol. default: break. case OKDATAOBJ_DESIGNATION_X_ERROR: case OKDATAOBJ_DESIGNATION_ERROR:/// Iris 06/04/2007 MR_NEED_SUPPORT_Y_ERR_AS_WEIGHT if(nErrCol<0) nErrCol = nCol. } else nErrCode = ERR_DATASEL_NO_DATA_SELECTED.GetName()) : "". 1.Columns(nXColEnd). //X Error column strRange = (nErrCode<ERR_UNKNOWN && nErrCol>=0 )? make_range_string(wks. 1.Columns(nYCol). 2.Columns(nXColBegin). strRange). 0. wks. then make it as Y if(nYCol<0 && nXColBegin!=0) nYCol = 0.GetName(). strRange). wks. case OKDATAOBJ_DESIGNATION_X: if(nXColBegin<0) nXColBegin = nCol. vsLabels. vsLabels. nXColEnd = nCol. and if the first column is not X type. if(pErrCode) . vsKeys. break. TRGP_DATA_RANGE. 1. break. } } if(nXColBegin<0) nErrCode = ERR_DATASEL_MUST_HAVE_X. tree_check_add_input_data_branch(trInputData.GetName()) : "". strRange). vsKeys. string strRange. TRGP_DATA_RANGE. TRGP_DATA_RANGE. 1. // X columns strRange = (nErrCode<ERR_UNKNOWN)? make_range_string(wks. //Y column strRange = (nErrCode<ERR_UNKNOWN && nYCol>=0 )? make_range_string(wks. wks.GetName()) : "".Columns(nErrCol). column //if not select Y column. wks.

GetParent(objParent). int* pErrCode.Pages().} ///End SUPPORT_AUTO_SELECTION_DATA_FOR_MULTI_REGRESSION // int init_input_data_branch_from_selection_XY(TreeNode& trInputData) /// ML 1/13/2006 XVARIABLEBASE_TO_VC //bool init_input_data_branch_from_selection(TreeNode& trInputData. OriginObject& objRef. obj = project. .InitInputDataBranchFromSelection(trInputData. OriginObject& objRef. ///eND UPDATE_MR_DATA_SELECTION_BETTER return true. int* pErrCode.ActiveLayer(). dwDataRule. if( !layer ) return false. } if( !obj ) return false. pnInputPageType). pErrCode. int *pnInputPageType) /// end XVARIABLEBASE_TO_VC { /// ML 1/13/2006 XVARIABLEBASE_TO_VC return Project. obj. PageBase page. } else { Project project. int& nInputPageType) bool init_input_data_branch_from_selection(TreeNode& trInputData. DWORD dwDataRule. Layer layer. ///////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////// /// end XVARIABLEBASE_TO_VC *pErrCode = nErrCode. DWORD dwDataRule. OriginObject objParent. page = objParent. layer = Project. objRef. OriginObject obj. if( objRef ) { obj = objRef. page = Project. layer = obj.

"W"}. vector<string> vsXYZLabels. vector<string> vsXYLabels. "Y Data Range". /// END CORRECT_LOCALIZTION ///End UPDATE_DATA_RANGE_MANIPULATION vector<string> vsKeys = {"X"."Z Data Range"}. ///Iris 11/01/05 SAVE_INPUT_DATA_WND_TYPE_TO_OPERATION_TREE /// ML 1/13/2006 XVARIABLEBASE_TO_VC //if(NULL != nInputPageType) // nInputPageType = page.GetType(). "Y".Add(_L("Independent Data")). "Y Data Range". /// END CORRECT_LOCALIZTION ///End UPDATE_DATA_RANGE_MANIPULATION vector<string> vsXYZKeys = {"X". "Weighting Range"}. vsMulIndepLabels. "Independent Data".GetType(). vsXYZLabels.Add(_L("Grouping Range")). /// END CORRECT_LOCALIZTION vector<string> vsXYKeys = {"X". vsLabels. /// Iris 02/04/06 QA70-8480 UPDATE_DATA_RANGE_MANIPULATION //vector<string> vsXYLabels = {"X Data Range". vector<string> vsLabels.Add(_L("Dependent Data")).Add(_L("X Data Range")).Add(_L("Data Range")). vsXYZLabels. vsXYLabels. "Z"}.Add(_L("Error Bar / Weight")).Add(_L("Z Data Range")). vsLabels. /// Max 07-11-05 CORRECT_LOCALIZTION //vector<string> vsLabels = {"Data Range". "Y Data Range". if(NULL != pnInputPageType) *pnInputPageType = page. "Error Bar / Weight"}. "Error Bar Data Range"}. "ED"}. "Error Bar Data Range"}. . vector<string> vsMulIndepLabels. "F". "Y".Add(_L("X Data Range")).Add(_L("Y Data Range")). "Independent Data". /// Iris 02/04/06 QA70-8480 UPDATE_DATA_RANGE_MANIPULATION //vector<string> vsMulIndepLabels = {"Dependent Data". "Grouping Range". vsXYLabels. vsMulIndepLabels. /// end XVARIABLEBASE_TO_VC ///End SAVE_INPUT_DATA_WND_TYPE_TO_OPERATION_TREE vector<string> vsMulIndepKeys = {"Y".Add(_L("Y Data Range")). vsMulIndepLabels. "ED"}. "X".Add(_L("Error Bar / Weight")). "Error Bar / Weight"}. /// Max 07-11-05 CORRECT_LOCALIZTION //vector<string> vsXYLabels = {"X Data Range". /// Max 07-11-05 CORRECT_LOCALIZTION //vector<string> vsXYZLabels = {"X Data Range". /// Max 07-11-05 CORRECT_LOCALIZTION //vector<string> vsMulIndepLabels = {"Dependent Data". vsXYLabels.if( !page ) return false. 
vsXYZLabels.

"State"}. // trInputData.GetSelection(dr). "Covariance"}. // during lifetime of dialog there are two ranges // one is original range and another one // is dialog's proprietary range // so we need to ensure here that uid of data range in tree // does not get reset with dialog's proprietary range // but only tree updates // and origin does come here on click OK (trough OnEdit) // and there is chance that dialog's proprietary // range is selected in the graph at that time . //---. nUIDRange). "Y". vsROCLabels. int nType = obj.Add(_L("Data")). /// END CORRECT_LOCALIZTION /// Iris 8/29/05 SUPPORT_STATS_INPUT_FORMAT // for Survival Analysis vector<string> vsSAKeys = {"X". "Y"}. /// END CORRECT_LOCALIZTION ///End SUPPORT_STATS_INPUT_FORMAT if( EXIST_PLOT == page. /// YuI 11/11/04 // int nUIDRange = dr.Add(_L("Covariance")). "Censor".GetType()) { DataRange dr. vsSALabels.Add(_L("Weighting Range")). vector<string> vsROCLabels.Add(_L("Censor")). vsROCLabels. need to look into this later { if(dwDataRule & DRR_GET_DEPENDENT) { dr. "Y"}. vector<string> vsSALabels.Add(_L("State")).vsLabels.Add(_L("Time")). /// Max 07-11-05 CORRECT_LOCALIZTION //vector<string> vsSALabels = {"Time".SetAttribute(STR_DATARANGE_UID_ATTRIB. /// Max 07-11-05 CORRECT_LOCALIZTION //vector<string> vsROCLabels = {"Data".GetTree(trInputData. vsSALabels.CPY 1/4/06 Iris 12/19/05 QA70-6367-P8 DESC_STATS_ON_GRAPH if(EXIST_PLOT == nType) //CPY nType might become 0. vsSALabels. FALSE). /// END CORRECT_LOCALIZTION // for ROC vector<string> vsROCKeys = {"X".GetUID(true).

ii++) { TreeNode tr = tree_check_add_input_data_branch(trInputData. tr. ii+1).tree // so tree must be updated but range UID should not be changed in // later when it reaches SetTree // it will update Original range from tree DataRange drExisting. vsSAKeys. } else if( dwDataRule & DRR_GET_STATS_STATE_LAST_COL) { return _init_input_data_branch_from_selection_for_ROC(trInputData. ///End DESC_STATS_ON_GRAPH } } bool bGetDependents = false. uidDataRangeExisting).X. dwDataRule. uint uidDataRangeExisting. vsSALabels. drExisting = get_data_range_from_input_data(trInputData. /// Iris 8/29/05 SUPPORT_STATS_INPUT_FORMAT if( dwDataRule & DRR_GET_STATS_SA) { return _init_input_data_branch_from_selection_for_survival_analysis(trInputData. nUIDRange). layer). &arrstrDependent) <= 0) return false. return true.GetUID(true). } /// Iris 12/19/05 QA70-6367-P8 DESC_STATS_ON_GRAPH vector<string> arrstrIndependent. vsKeys. vsROCKeys. if( !drExisting ) { int nUIDRange = dr. } else ///End SUPPORT_STATS_INPUT_FORMAT . vsROCLabels. } return true. ii<arrstrDependent. trInputData. TRGP_BRANCH. } /// end YuI 11/11/04 // return nType. dwDataRule.strVal = arrstrDependent[ii].GetSize().GetSelection(arrstrIndependent. vsLabels. if( obj.SetAttribute(STR_DATARANGE_UID_ATTRIB. layer). arrstrDependent. for(int ii=0.

vsKeys.GetType()) { MatrixLayer ml = layer. if(dwDataRule & DRR_ONE_DEP_MULTIINDEP) { vsKeys = vsMulIndepKeys. return true.GetTree(trInputData.MatrixObjects().GetName()). ///End SUPPORT_AUTO_SELECTION_DATA_FOR_MULTI_REGRESSION } } /// YuI 07/19/05 QA70-7900 MATRIX_OBJECT_XFUNCTION_AUTOUPDATE else if( dwDataRule & DRR_MATRIX_DATA ) . /// YuI 11/21/05 QA70-7450 MATRIX_RANGE_SHOULD_BE_Z // dr. } } } ///End SUPPORT_GETTING_DATA_RANGE_FROM_MATRIX } else if(dwDataRule & DRR_GET_DEPENDENT) { vsKeys = vsXYKeys. vsLabels = vsXYLabels. strRange). vsLabels = vsXYZLabels.Add("Z". dr. bGetDependents = true. /// Iris 11/17/05 SUPPORT_GETTING_DATA_RANGE_FROM_MATRIX if(EXIST_MATRIX == page. /// end MATRIX_RANGE_SHOULD_BE_Z dr. /// Iris 10/09/05 SUPPORT_AUTO_SELECTION_DATA_FOR_MULTI_REGRESSION return _init_input_data_branch_from_selection_one_dep_multi_indep(trInputData. dwDataRule. layer). mo. bGetDependents = true. pErrCode.Add("X". if( ml ) { MatrixObject mo = ml. FALSE). vsLabels. DataRange dr.if(dwDataRule & DRR_GET_Z_DEPENDENT) { vsKeys = vsXYZKeys. vsLabels = vsMulIndepLabels. if( mo ) { string strRange = make_range_string(ml. strRange).

2.{ construct tree // matrix is special .GetSelection(arrstrIndependent.GetSelection will not resolve it and is not supposed to // so we here check if MatrixPage is active . int int int int nSizeDep = arrstrDependent. &arrPlotObjUIDs). 1. nSizeZ = arrstrZ.GetSize(). arrstrYErrorBars. . &arrstrZ. tree_check_add_input_data_branch(trInputData. mo. /// YuI 11/21/05 QA70-7450 MATRIX_RANGE_SHOULD_BE_Z // tree_check_add_input_data_branch(trInputData. TRGP_DATA_RANGE). vector<uint> arrPlotObjUIDs. if(dwDataRule & DRR_GET_Z_DEPENDENT) nSel = obj. if( ml ) { MatrixObject mo = ml. //prepare_input_data_for_operation(trInputData. &arrstrDependent. vsXYZKeys. if(0 == nSizeDep && 0 == nSizeIndep && 0 == nSizeErrorBars && 0 == nSizeZ) { tree_check_add_input_data_branch(trInputData. /// end MATRIX_RANGE_SHOULD_BE_Z return true. } /// end MATRIX_OBJECT_XFUNCTION_AUTOUPDATE int nSel.we ghet active matrix object and MatrixLayer ml = layer. &arrstrDependent. strRange). NULL. 1. (dwDataRule & DRR_ADD_TO_PROJECT)? true:false). vsXYZLabels.GetSelection(arrstrIndependent. TRGP_DATA_RANGE. TRGP_DATA_RANGE. nSizeErrorBars = arrstrYErrorBars. if(bGetDependents) { vector<string> arrstrDependent. vsLabels.GetSize(). strRange). } } return false. vsKeys. &arrPlotObjUIDs). 0. NULL. else nSel = obj. nSizeIndep = arrstrIndependent. vsKeys.GetSize(). arrstrZ. vsLabels. if( mo ) { string strRange = make_range_string(ml. &arrstrYErrorBars.GetSize().Project. vector<string> arrstrIndependent.MatrixObjects().GetName()).

ii < nSizeErrorBars. ftoa(arrPlotObjUIDs[ii])). ii+1. for(ii = 0.GetSize() == nSizeZ && arrPlotObjUIDs[ii] != 0 ) trRange. 2. } } else { for(ii = 0. } if(dwDataRule & DRR_GET_Z_DEPENDENT) { for(ii = 0. TRGP_DATA_RANGE. ii++ ) { TreeNode trRange = tree_check_add_input_data_branch(trInputData. arrstrYErrorBars[ii]). ii < nSizeZ. } } else { // check if Grouping range or Weight needed to be removed . TRGP_DATA_RANGE. TRGP_DATA_RANGE. vsLabels. vsKeys. else { if(dwDataRule & DRR_GET_Z_DEPENDENT) *pErrCode = ERR_DATASEL_MUST_HAVE_Z. vsLabels. vsKeys. arrstrDependent[ii]).SetAttribute(STR_PLOTOBJ_UID_ATTRIB. for(ii = 0. ii++ ) tree_check_add_input_data_branch(trInputData. } } return false. 2. if(trRange && arrPlotObjUIDs. TRGP_DATA_RANGE. 0. } int ii. if(nSel < 0) *pErrCode = ERR_DATASEL_NO_DATA_SELECTED. ii < nSizeDep. vsLabels. else if(dwDataRule & DRR_GET_DEPENDENT) *pErrCode = ERR_DATASEL_MUST_HAVE_Y.if(pErrCode) { *pErrCode = ERR_UNKNOWN. ii+1. ftoa(arrPlotObjUIDs[ii])). vsLabels. vsKeys. ii+1. ii < nSizeIndep. arrstrZ[ii]).GetSize() == nSizeDep && arrPlotObjUIDs[ii] != 0 ) trRange. ii++ ) { TreeNode trRange = tree_check_add_input_data_branch(trInputData. ii++ ) tree_check_add_input_data_branch(trInputData.SetAttribute(STR_PLOTOBJ_UID_ATTRIB. if(!(dwDataRule & DRR_GET_Z_DEPENDENT) && trRange && arrPlotObjUIDs. 1. vsKeys. arrstrIndependent[ii]). ii+1.

1= "Grouping Range".GetSelection(arrstrIndependent). nSel = obj.X. double dTo. double dFrom.Add(vsKeys[ii]). // nSel = Project.strVal = strSelection. char chDelimiter. vsKeys. vector& vVals. vsLabelsTemp. vector<string> vsLabelsTemp. double dTo) bool get_tokens_check_range(string& str. (dwDataRule & DRR_ADD_TO_PROJECT)?true:false). bool bRemoveDup) ///end DUP_PERCENT_CAUSE_BOX_CHART_PLOT_LOOKS_BAD { if( NULL == str || NULL == vVals) return false. int nSize = arrstrIndependent.Empty(). } vsKeys = vsKeysTemp.GetSelection(arrstrIndependent). tr. TRGP_BRANCH).0875 DUP_PERCENT_CAUSE_BOX_CHART_PLOT_LOOKS_BAD //bool get_tokens_check_range(string& str. if(ii == 2 && (dwDataRule & DRR_NO_WEIGHTS)) continue. . vsLabels = vsLabelsTemp.Add(vsLabels[ii]). if( 0 < nSize ) strSelection = arrstrIndependent[0]. ii < vsKeys. } //prepare_input_data_for_operation(trInputData.GetLength() ) return false. vsLabels.GetSize(). vsKeysTemp. else strSelection.if(dwDataRule & (DRR_NO_WEIGHTS | DRR_NO_FACTORS)) { vector<string> vsKeysTemp. TreeNode tr = tree_check_add_input_data_branch(trInputData. } /// end NLFITTER_OPERATION_IMPLEMENTATION /// Iris 9/10/04 /// Iris 6/02/2008 QA80-11639 v8. } string strSelection. vector& vVals. // assume 0 = "Data Range". double dFrom. 2 = "Weighting Range" for(int ii = 0.GetSize(). return true. if( 0 == str. ii++) { if(ii == 1 && (dwDataRule & DRR_NO_FACTORS)) continue. char chDelimiter.

dTemp) > 0 ) continue. ///End NEED_TRIM_EMPTY_STRING double dTemp = atof(vsTemp[ii]). chDelimiter). ii++) . ii < nTokens. ///End NEED_TRIM_EMPTY_STRING for(int ii = 0. vector<string> vsTemp.GetTokens(vTemp.GetTokens(vsTemp. } for(int ii=0.GetSize(). dTemp. } } } return true.Add(dTemp). if(vsTemp[ii]. if( NULL == vVals) { return "". string rollback_string_tokens(vector& vVals. ii<vVals. int nTokens = str.IsEmpty()) continue. chDelimiter).0875 DUP_PERCENT_CAUSE_BOX_CHART_PLOT_LOOKS_BAD vector<uint> vIndex.Find(vIndex. ii++) { /// Iris 06/26/06 NEED_TRIM_EMPTY_STRING //double dTemp = vTemp[ii].Add(dTemp). if( bRemoveDup && vVals.RemoveAll(). /// Iris 06/26/06 NEED_TRIM_EMPTY_STRING //vector vTemp. //int nTokens = str.vVals. char chDelimiter) { string str. ///end DUP_PERCENT_CAUSE_BOX_CHART_PLOT_LOOKS_BAD if( NANUM != dFrom && NANUM != dTo) { if( dTemp >= dFrom && dTemp <= dTo) vVals. /// Iris 6/02/2008 QA80-11639 v8. if(is_missing_value(dTemp)) continue. } else { vVals.

string rollback_string_tokens(StringArray& saVals. if( NULL == saVals ) { return "". } for(int ii=0. if(str. str += strTemp.{ string strTemp. char chDelimiter) { string str.Delete( str. chDelimiter).GetLength()-1 ). str. strTemp.0223 CLEAN_ACTIVE_IMPLEMENTATION /// //static int _cvt_str_to_predefined_type(const string& str) ///Iris 11/16/04 QA70-7139 UPDATE_2ND_ARGUMENT_FOR_PAGE_SHORT_NAME_FROM_DISPLAY_NAME /// int cvt_str_to_predefined_type(const string& str) int cvt_str_to_predefined_type(LPCSTR lpcsz) /// end CLEAN_ACTIVE_IMPLEMENTATION { ///DSC 6/27/05 CONSTRUCT_THEME_FILE_FROM_COMPOSITE_NAME /* /// EJP 2005-04-15 v8. } if( 0 != str.Format("%s%c". strTemp.Delete( str.0223 CLEAN_ACTIVE_IMPLEMENTATION /// if(str.GetLength() ) str.GetSize(). ii<saVals. } if( 0 != str. vVals[ii]. return str. } // <source> can be skipped as is already checked inside page_short_name_from_display_name /// EJP 2005-04-15 v8.TrimRight(). string str = lpcsz. chDelimiter).GetLength() < 3 || str[0] != '<') /// return PDS_USER. saVals[ii]. str. str += strTemp.Format("%d%c".IsEmpty()) .TrimLeft().GetLength() ) str. } return str. ii++) { string strTemp.GetLength()-1 ).

if( n < 2 ) return PDS_USER.CompareNoCase(STR_NEW) == 0) return PDS_NEW. string* pstr) //=NULL . if( str[0] != '<') return PDS_USER.CompareNoCase(STR_NONE) == 0) return PDS_NONE.CompareNoCase(STR_SOURCE_BOOK) == 0) ///Iris 11/16/04 QA70-7139 UPDATE_2ND_ARGUMENT_FOR_PAGE_SHORT_NAME_FROM_DISPLAY_NAME return PDS_SOURCE.CompareNoCase(THEME_FILENAME_FACTORY_DEFAULT) == 0) return PDS_FACTORY_DEFAULT.CompareNoCase(STR_CUSTOM) == 0) ///Iris 11/15/04 QA70-7139 ADD_CUSTOM_OPTION_TO_REPORT return PDS_CUSTOM.CompareNoCase(STR_SAME) == 0 ) return PDS_SAME. else if(str. else if(str. /// end YuI return PDS_USER. else if(str. ///DSC 3/25/03 CLEANUP_LAST_USE else if(str. str = str.0223 CLEAN_ACTIVE_IMPLEMENTATION else if( str.CompareNoCase(STR_ACTIVE) == 0 ) return PDS_ACTIVE. else if(str. str = "book1" // lpcszStr = "book1" // reutrn PDS_USER. ///end CLEANUP_LAST_USE /// EJP 2005-04-15 v8. */ return okutil_cvt_str_to_predefined_type(lpcsz).Find('>'). /// end CLEAN_ACTIVE_IMPLEMENTATION /// YuI 05/20/05 else if( str. ///end CONSTRUCT_THEME_FILE_FROM_COMPOSITE_NAME } // lpcszStr = "<source> book1-long name" // return PDS_SOURCE. str = "book1" int str_to_predefined_type(LPCSTR lpcszStr.Left(n + 1).return PDS_USER.CompareNoCase(STR_REPORT) == 0) return PDS_REPORT. else if(str. /// end CLEAN_ACTIVE_IMPLEMENTATION if(str.CompareNoCase(STR_LAST_USED) == 0) return PDS_LAST_USED. int n = str.

///Iris 4/16/05 EXISTING_BOOK_NEED_INCLUDE_HIDDEN_BOOK /* string get_show_books_in_project(string& strExclude) . ///Iris 11/16/04 QA70-7139 UPDATE_2ND_ARGUMENT_FOR_PAGE_SHORT_NAME_FROM_DISPLAY_NAME /* bool bSourceBook = false. bSourceBook). ///Iris 11/15/04 QA70-7139 ADD_CUSTOM_OPTION_TO_REPORT. /// ML 3/3/2005 XFUNCTION_OUTPUT_VARIABLES_DATA_ASSOC //if(PDS_SOURCE != nType && PDS_CUSTOM != nType) /// YuI 05/23/05 XVARIABLE_SAME_IMPLEMENTATION //if(PDS_SOURCE != nType && PDS_CUSTOM != nType && PDS_NEW != nType && PDS_ACTIVE != nType) /// Iris 3/21/2008 v8. nType).{ int nType = PDS_USER. should pass lpcszStr here if not Source Book. should pass lpcszStr here if not Source Book. if(bSourceBook) { nType = PDS_SOURCE.0829 QA80-10934 ADD_EDITBOX_TO_SPECIFICATION_BOOK_SHEET_NAME //if(PDS_SOURCE != nType && PDS_CUSTOM != nType && PDS_NEW != nType && PDS_ACTIVE != nType && PDS_SAME != nType) if(PDS_SOURCE != nType && PDS_CUSTOM != nType && PDS_NEW != nType && PDS_ACTIVE != nType && PDS_SAME != nType && PDS_AUTO != nType) ///end ADD_EDITBOX_TO_SPECIFICATION_BOOK_SHEET_NAME /// end XVARIABLE_SAME_IMPLEMENTATION /// end XFUNCTION_OUTPUT_VARIABLES_DATA_ASSOC { str = lpcszStr. } */ ///End UPDATE_2ND_ARGUMENT_FOR_PAGE_SHORT_NAME_FROM_DISPLAY_NAME string str = page_short_name_from_display_name(lpcszStr. } else { str = lpcszStr. Adding the assignment because of QA70-7168 nType = cvt_str_to_predefined_type(str). Adding the assignment because of QA70-7168 nType = _cvt_str_to_predefined_type(str). // special handling for page source since it will need to short page name in that case string str = page_short_name_from_display_name(lpcszStr. return nType. ///Iris 11/15/04 QA70-7139 ADD_CUSTOM_OPTION_TO_REPORT. } if(pstr) } *pstr = str.

Pages) { if( EXIST_WKS == pg. } */ ///end EXISTING_BOOK_NEED_INCLUDE_HIDDEN_BOOK //-------.255. if( 0 == strExclude. uint nBranch2=RGB(220.IsEmpty()) strList.Delete(strList. strTemp.Compare(pg. vector<uint> vnBranchColors. //default alternate color uint nBranch1=RGB(255. . string strTemp. uint nBranch3=RGB(192.//RGB(166. 240). 202.192).//RGB(215.0xF0).End of File /// ML 11/11/2004 QA70-6845 CELL_VALUE_LINKING void build_report_nodes_link_string(string &str. pnArrIDs[ii]). return strList.{ string strList.255. } } /// end CELL_VALUE_LINKING } if(!strList. ///Danice TREE_ROW_SHOW_ALTERNATE_COLORS void get_analysis_gui_color(string &strColors. } strList += pg. 180). 226.192.0xF0.GetName() + STR_TOKEN_SEP.255). str += strTemp. string &strSpecialIndicatorColors.GetName()) ) continue.180). foreach(PageBase pg in Project. int nCount) { str = STR_LINK_CELL_VALUE_PREFIX. string &strActiveObjectColors) //=NULL { uint nLeaf2= COLOR_WHITE. int *pnArrIDs.GetType() ) { if(PAGE_HIDDEN == pg. uint nBranch4=RGB(255. for (int ii = 0. ii++) { str += '/'. ii < nCount.GetShow()) continue.Format("%u".255.GetLength()-1).//COLOR_LTYELLOW.255). uint nLeaf1 = RGB(0xF0.

220) }. vnBranchColors. ///Danice 11/22/04 : add active object color: colorMain | colorInput | colorOutput if(NULL != strActiveObjectColors) { int nMain=0. vnBranchColors. } ///end } ///end /////////////////////////////////////////////////////////////////////////////// //--------.vnBranchColors.255. added nReadOnlyColor uint nReadOnlyColor = COLOR_BLUE. RGB(220. uint nBackGroundColor=RGB(0xF0. uint nForeColor=COLOR_RED.Add(nBranch2).Add(nBranch4).192. strActiveObjectColors=(string)nMain+"|"+(string)nInput+"|"+(string)nOutput.Add(nBranch3).220). int nSize = 4) { for(int ii = 0. RGB(192. ii < nSize. //---.192).220.CPY 12/2/04 ENABLE_READ_ONLY_USE_SEPARATE_COLOR. nLeaf2.192). uint nReadOnlyBackColor = 0. bool bCheck = false. int* nrc2. vnBranchColors).0xF0. */ strColors=get_GetN_background_colors(nLeaf1.220. // ignore strSpecialIndicatorColors=(string)nForeColor+"|"+nBackGroundColor + "|" + nReadOnlyColor + "|" + nReadOnlyBackColor. uint nLeaf1 = RGB(0xF0. RGB(220. /* uint nLeaf2= COLOR_WHITE.CPY 11/26/04 OUTPUT_FIT_CURVES_TO_SOURCE_SHEET static void __set_rc(int* nrc1.0xF0). int nOutput=COLOR_BLUE. //default black int nInput=COLOR_RED.0xF0. //default alternate color vector<uint> vnBranchColors = { RGB(255. vnBranchColors. ii++) { if(bCheck) { if(ii > 1) { .Add(nBranch1).0xF0).

&strName) && nrc[0] >= 0 && (NULL==lpcszType || strName. nrc[0]. int& c2. int& r2. true).GetNumRanges(). Worksheet& wksFirst. } } r1 = rc[0]. if(rngInput. nrc). c2 = rc[3]. int& c1. for(int ii = 0. int nWks = 0. wks. } else { } } else } } int find_input_range_bounding_box(const DataRange& rngInput. nrc[3]. int& r1. return nWks. LPCSTR lpcszType) { int nInRanges = rngInput. } else if(worksheets_are_same(wks. if(nrc2[ii] < nrc1[ii]) nrc1[ii] = nrc2[ii]. nWks++. int jj. int rc[4]. int& r2. __set_rc(rc. . nrc. int nrc[4]. } bool find_input_range_bounding_box(const DataRange& rngInput. int& c2. string strTypeSeparator) //="X" { nrc1[ii] = nrc2[ii]. c1 = rc[1]. nrc[1]. nrc[2]. ii++) { Worksheet wks.GetRange(ii. int& c1.if(nrc2[ii] > nrc1[ii]) nrc1[ii] = nrc2[ii]. ii < nInRanges.CompareNoCase(lpcszType) == 0)) { if(0 == nWks) { wksFirst = wks. string strName. r2 = rc[2]. wksFirst)) { __set_rc(rc. } else nWks++. int nDataIndex. Worksheet& wks. int& r1.

} } if(wks) { r1 = rc[0]. nrc[1]. //const static vector<int> s_vnAdvancedGUI_ids = {IDE_GEO_MEAN. ///Echo 3/22/05 v8. true). } else __set_rc(rc. IDE_USE_CUSTOM_PERCENTILES. ii < nInRanges. if(nDataIndex +1 == nGroup) break. . 159}.GetNumRanges(). IDE_IMIN. IDE_SD_X_2.GetRange(ii. IDE_SD_X_3. nrc. IDE_CUSTOM_PERCENTILES}. IDE_MAD. int rc[4].int nInRanges = rngInput. if(rngInput. int nrc[4]. r2 = rc[2]. IDE_RANGE. string strName. IDE_IMAX.0207 QA70-6204 ADD_GSD ///Echo 11/22/05 v8.0339 ADD_MODE ///Echo 5/31/06 RESET_STATS_ADVANCED_OPTIONS /* const static vector<int> s_vnAdvancedGUI_ids = { //Desc Stats //IDE_NUM_MISSING. IDE_IMAX. nrc[3]. wks = wksTemp. return true. nrc). IDE_UCI. IDE_IMIN. nrc[2]. &strName) && nrc[0] >= 0) { if(strName. IDE_IQR. for(int ii = 0. c2 = rc[3].CompareNoCase(strTypeSeparator) == 0) { nGroup++. ii++) { Worksheet wksTemp. IDE_LCI. IDE_UCI. IDE_SD_X_2. } return false. IDE_RANGE. wksTemp. IDE_USE_CUSTOM_PERCENTILES. int nGroup = -1. IDE_IQR. c1 = rc[1]. IDE_MAD. } /// Max 7/13/06 COMMENT_ALL_ADVANCED_OPTIONS // All options should be visible in regular mode ///Iris 12/27/04 IMPROVE_ACCESS_ADVAN_IDS_THEME_FILE //const vector<int> vnDescAdvanIDs = {IDE_GEO_MEAN. __set_rc(rc. IDE_MODE. IDE_LCI. IDE_CUSTOM_PERCENTILES. int jj. //IDE_SD_X_3. nrc[0].

//One Sample IDST_CHISQ_CONTROL. IDE_FIT_RSQUARE_COD. IDST_REPORT_CURVE_OPTIONS. }. IDE_PARAM_CONF_INT. IDE_FIT_REDUCED_CHI_SQUARE. IDE_WEIGHTSUM. //Optional Tables IDE_REPORT_CREATE_OPTION. IDE_IMIN. IDE_SD_X_2. IDE_GEO_SD. IDE_SD_X_2. IDE_PARAM_FIX. IDE_RANGE. //------. should not add -1 to list as it can complicate things //-1 0x7FFFFFF // for some reason this vector can not end with . IDE_SD_X_3. IDE_SD_X_3. IDE_PARAM_CONF_INT. IDST_LR_OPTIONS. IDE_IMAX. //Fit Control //Kevin 09/01/05 CHANGE_STATISTICS_NAME //IDE_PARAM_FIX. IDE_LCI. IDE_GEO_SD. IDST_ANOVA_EQUA_VAR. IDE_USE_CUSTOM_PERCENTILES. IDE_FIT_ADJ_RSQUARE. */ const static vector<int> s_vnAdvancedGUI_ids = { 0x7FFFFFF // for some reason this vector can not end with . IDE_UCI. //End CHANGE_STATISTICS_NAME //IDE_FIT_NUM_POINTS. IDE_CORSUMSQ. IDE_FIT_DOF. IDE_SKEWNESS.CPY 4/22/05 QA70-7651 DESC_STATS_ON_ROWS_CLEAN. IDE_LCL. IDE_FIT_ROOT_MSE. IDE_UNCORSUMSQ. //IDE_GEO_MEAN. ///Echo 1/6/05 UNADVANCED_RSQ_ADJRSQ //IDE_FIT_RSQUARE_COD.//IDE_VARIANCE. IDE_MAD. IDE_CUSTOM_PERCENTILES.end }. //Kevin 09/01/05 CHANGE_STATISTICS_NAME //IDE_GEO_MEAN. IDE_PARAM_LCL. IDE_FIT_NORM_REDIDUALS. //IDE_MAD. /// End COMMENT_ALL_ADVANCED_OPTIONS . IDE_FIT_NORM_REDIDUALS. IDE_COV. //The temp last one //------. //Two Sample IDST_FTEST_CONTROL. //ANOVE One Way and ANOVA Two Way IDST_ANOVA_MEAN_COMP. IDE_PARAM_LCI. //End CHANGE_STATISTICS_NAME //IDE_IQR. IDE_FIT_SSR. IDST_POWER_CONTROL. IDE_IMAX. IDE_KURTOSIS. MR and etc. IDE_PARAM_UCI. //Curve Report of LR. IDE_IMIN. IDE_COMPUTER_CONTROL. IDE_UCL. IDE_PARAM_UCL. //Function Browser Filter IDST_FB_SOURCE.

DataID).DataID. } } } */ if(bIsPro) return. ///Iris 12/31/04 IMPROVE_ACCESS_ADVANCED_IDS_FUNC //if( _get_built_in_advanced_GUI_ids(_get_advanced_ids_theme_file(trParam). vnIDs).GetAttribute(STR_ATTRIB_ISPRO.GetSize(). vector<int> vnAdvanIDs. else { int nIsPro. const vector<int>& vnProIDs. int nCount = vnIDs. bool bIsPro) { /// Iris 6/09/05 CHECK_PRO_ID_WHEN_CHECK_ADVANCE_ID /* foreach(TreeNode tr in trParam. bool bShow) { if(!trParam) return. if(1 == nCount) vnIDs. } ///end CHECK_PRO_ID_WHEN_CHECK_ADVANCE_ID } void update_param_tree_advanced_ids(TreeNode& trParam.Find(vIndex. vnProIDs[ii]). while(--nFind >= 0) vnAdvanIDs.RemoveAt(vecIndex[nFind]). ii++) { vector<uint> vecIndex.Find(vecIndex.static void _update_advanced_ids_by_pro_ids(vector<int>& vnAdvanIDs.RemoveAt(vIndex[0]). if(tr. vnProIDs[ii]. nIsPro) && !nIsPro) //is regular version { vector<uint> vIndex. vnProIDs. for(int ii=0. tr. ii<vnProIDs. int nFind = vnAdvanIDs. string strShow = bShow? "1" : "0".Children) { int nChildNodes = tr.GetNodeCount(). if(bRecursive && nChildNodes > 0) _update_advanced_ids_by_pro_ids(tr. vnIDs) ) ///Frank 4/5/05 GUI_ADVANCED_SETTING_FROM_FILE string strThemeFileName = tree_get_advanced_settings_file(trParam). . tr.

IDE_MAD. /// END COMMA_IS_MISSED_IN_FIRST_LINE IDE_CORSUMSQ. is_pro_version()).CPY 4/22/05 QA70-7651 DESC_STATS_ON_ROWS_CLEAN update_param_tree_for_pro_ids(trParam. vnAdvanIDs).0833 TTEST_IS_REPLACED_WITH_XF_AND_CONTROL_BECOMES_USELESS /* //One Sample t-test IDST_CHISQ_CONTROL. //Two Way ANOVA IDE_MEANCOMP_DUNNSIDAK. vnAdvanIDs. IDE_UNCORSUMSQ. IDE_MAD. IDE_MEANCOMP_DUNNE }. //---. is_pro_version()). _update_advanced_ids_by_pro_ids(vnAdvanIDs. vnProIDs. IDE_UNCORSUMSQ IDE_GEO_MEAN. IDE_MEANCOMP_FISHLSD. //Normality Test IDST_KOLM_CONTROL. /// Max 3/27/08 QA70-11325 v8. tree_set_attributes(trParam.if(!strThemeFileName. IDE_NORM_LILLE. if( !bShow ) tree_GETN_reset_hidden_values(trParam. IDE_MEANCOMP_HOLMSIDAK. strShow). IDE_COMPUTER_CONTROL. IDE_GEO_SD. IDE_GEO_SD.IsEmpty() && _get_advanced_GUI_ids(trParam. IDE_MEANCOMP_DUNNE. IDE_MEANCOMP_HOLMBONF. } } ///end IMPROVE_ACCESS_ADVANCED_IDS_FUNC ///Iris 3/30/05 HIDDEN_PRO_FEATURE_FOR_REGULAR_VERSION const static vector<int> s_vnProGUI_ids = { //Desc Stats /// Max 3/27/08 QA70-11325 v8. IDE_MEANCOMP_HOLMBONF. vnAdvanIDs. . IDE_MEANCOMP_FISHLSD.IDE_SD_X_2.IDE_SD_X_2. //Two Sample t-test IDST_FTEST_CONTROL. */ /// END TTEST_IS_REPLACED_WITH_XF_AND_CONTROL_BECOMES_USELESS //One Way ANOVA IDE_MEANCOMP_DUNNSIDAK.Sort(). IDE_WEIGHTSUM. IDE_SD_X_3. IDE_MEANCOMP_HOLMSIDAK. IDE_SD_X_3. strThemeFileName) && _get_Pro_GUI_ids(vnProIDs)) //if( _get_advanced_GUI_ids(vnIDs)) ///End GUI_ADVANCED_SETTING_FROM_FILE { vnAdvanIDs.0833 COMMA_IS_MISSING_IN_FIRST_LINE // the missing comma results in IDE_UNCORSUMSQ and IDE_CORSUMSQ lost effect //IDE_GEO_MEAN.

// this will be later. } static bool _get_Pro_GUI_ids(vector<int>& vnIDs) { string strProIDFile = "". else _get_built_in_Pro_GUI_ids(vnIDs). vnIDs. trNodeID. strShow). tr.AddNode("IDs"). return true. /////end UPDATE_PRO_ID_SETTING } } //------.end static void _get_built_in_Pro_GUI_ids(vector<int>& vnIDs) { vnIDs = s_vnProGUI_ids. } .IsFile()) return _load_integer_vector_from_tree_file(strProIDFile. vnIDs). vnIDs. vector<int> vnIDs. if( _get_Pro_GUI_ids(vnIDs)) { tree_set_attributes(trParam. string strShow = bIsPro? "1" : "0". a centralized place. TreeNode trNodeID = tr. } ///end HIDDEN_PRO_FEATURE_FOR_REGULAR_VERSION ///Iris 12/31/04 IMPROVE_ACCESS_ADVANCED_IDS_FUNC /* static void _generate_build_in_advanced_GUI_ids_theme_file(string strThemeFile) //template function here { Tree tr. just one file if(strProIDFile.CPY 4/22/05 QA70-7651 DESC_STATS_ON_ROWS_CLEAN void update_param_tree_for_pro_ids(TreeNode& trParam. ///// Iris 6/02/05 UPDATE_PRO_ID_SETTING //tree_set_attributes(trParam. bool bIsPro) { if(!trParam || bIsPro) return. STR_ATTRIB_ISPRO). strShow.nVals = vnDescAdvanIDs.Save(strThemeFile).//------.

IDE_VARIANCE.IDE_MODE. . vector<int>& vnIDs) { if( !strThemeFile.IDE_COV.IsFile() ) _generate_build_in_advanced_GUI_ids_theme_file(strThemeFile). Tree trAdvanIDs. return true.IDE_WEIGHTS UM. //IDE_MAD. //echo said. if( !trAdvanIDs.FirstNode.IDE_CORSU MSQ.FirstNode. if(trStatsOnRows && 1== trStatsOnRows. return true.IDE_U SE_CUSTOM_PERCENTILES.IDE_IQR. vnIDs = trAdvanIDs.IDE_MEDIAN. if( !trAdvanIDs.IDE_GEO_MEAN.nVals.IDE_IMAX.IDE_SKEWNESS. } */ static bool _load_integer_vector_from_tree_file(string strThemeFile.IDE_KURTOSIS. Q2.Load(strThemeFile)) return false.IDE_GEO_SD.IDE_CUSTOM_PERCENTILES}. Q3 vector<int> vnExceptIDs = {IDE_NUM_MISSING. vector<int>& vnIDs) { Tree trAdvanIDs.IDE_SKEWNESS. when stats on row.IDE_RANGE.IDE_KURTOSIS. should bring back Q1.IDE_IMIN. vnIDs = trAdvanIDs.StatsOnRows.IDE_UNCORSUMSQ.IDE_UNCORSUMSQ.IDE_P75.static bool _get_built_in_advanced_GUI_ids(const string& strThemeFile.IDE_COV.IDE_CORSU MSQ.nVal) { ///Cheney 2007-4-27 STATAS_ON_ROW_NOT_NEED_WDF_ETC //vector<int> vnExceptIDs = {IDE_NUM_MISSING.Load(strThemeFile)) return false. /// Iris 06/07/2007 v8/0634 STATS_ON_ROWS_GUI_INCORRECT_WHEN_DO_CHANGE_PARAM /* //If the tool is Statistics On Rows TreeNode trStatsOnRows = trGUI.IDE_P25.IDE_VARIANCE.nVals.IDE_SD_X_3.IDE_SD_X_2. vector<int>& vnIDs) { vnIDs = s_vnAdvancedGUI_ids. } static void _get_built_in_advanced_GUI_ids(const TreeNode& trGUI. //IDE_IMIN.

a centralized place.IDE_MAD. vs. } ///Frank 4/5/05 GUI_ADVANCED_SETTING_FROM_FILE static bool _get_advanced_GUI_ids(const TreeNode& trGUI.GetTokens(vs. vector<int>& vnIDs) { if(strThemeFile. return true.GetSize(). return true. return _load_integer_vector_from_tree_file(strAdvancedIDFile. vector<int>& vnIDs.Find("|") >=0 ) { vector<string> vs. vnIDs). } */ ///end STATS_ON_ROWS_GUI_INCORRECT_WHEN_DO_CHANGE_PARAM } static bool _get_advanced_GUI_ids(const TreeNode& trGUI. } static bool _load_advanced_settings_from_tree_file(string strThemeFile.IDE_RANGE. vnIDs). vector<int>& vnIDs) { string strAdvancedIDFile = "". IDE_IMIN. if(!tree_read_values_with_ids(strThemeFile.GetSize() <= 0 ) return false. for(int n= 0 .IDE_IMAX.IDE_IMIN. } if(!strThemeFile.IDE_SD_X_3. return true. strComment)) .IDE_C USTOM_PERCENTILES}.'|'). else _get_built_in_advanced_GUI_ids(trGUI. just one file if(strAdvancedIDFile. string strComment. vnIDs). vnIDs. vnIDs).IDE_USE_CUSTOM_PERCENTILES.IDE_GEO_SD.IDE_MODE. vector<string> vs.// this will be later.IsFile()) //return _load_advanced_GUI_ids_from_file(strAdvancedIDFile.IDE_IQR.IDE_GEO_MEAN. strThemeFile.IDE_SD_X_2.Append(vnExceptIDs).IDE_WEIGHTSU M. n<vs.IsFile()) return false. if(vnIDs.Add(atoi(vs[n])). vnIDs )) _get_built_in_advanced_GUI_ids(trGUI. ///end STATAS_ON_ROW_NOT_NEED_WDF_ETC vnIDs. n++) vnIDs. string strThemeFileName ) { if(!_load_advanced_settings_from_tree_file( strThemeFileName.

string strThemeFile. return true. strThemeFile).IsFile()) if(bSearch && !bExists) { nFolderLocation = ORIGIN_PATH_GROUP. if(lpcszCategoryName && *lpcszCategoryName != '\0') { strFolder += "\\". get_origin_path(nFolderLocation. LPCSTR lpcszCategoryName.CPY 2/17/04 XF_RELATED_TO_PATH // set nFolderLocation = -1 to search int xf_find_file(string& strFilePath. = -1 { bool bSearch = false. LPCSTR lpcszFunctionName. lpcszFunctionName. STR_XF_FILE_EXT). trGUI. return strThemeFile. } ///end IMPROVE_ACCESS_ADVAN_IDS_THEME_FILE */ ///end IMPROVE_ACCESS_ADVANCED_IDS_FUNC ////////////////////////////////////////////////////////////////////////////// //--. string strFolder = STR_XF_FILE_FOLDER. } // strFilePath. } // string strFormatFDF = "%s%s%s". bool bExists = okutil_make_file_path(strFilePath. int nFolderLocation )// = NULL. . // if(bSearch && !strFilePath. lpcszFunctionName. nFolderLocation = ORIGIN_PATH_USER. strFolder. strFolder += lpcszCategoryName. strFolder). if(nFolderLocation < 0) { bSearch = true. nFolderLocation. STR_XF_FILE_EXT).GetAttribute(STR_ADVANCED_IDS_THEME_FILE_ATTRIB.Format(strFormatFDF. } ///End GUI_ADVANCED_SETTING_FROM_FILE /* static string _get_advanced_ids_theme_file(const TreeNode& trGUI) { if( !trGUI) return "".return false.

STR_XF_FILE_EXT). bExists = okutil_make_file_path(strFilePath. get_origin_path(nFolderLocation.2 for num of val modified // error codes (<0) // PARSE_TOO_MANY_ENTRIES // PARSE_VARNAME_NOT_FOUND // example. strFolder).1. STR_XF_FILE_EXT). STR_XF_FILE_EXT). } //---. if( !bExists ) { nFolderLocation = ORIGIN_PATH_SYSTEM. vector<string>& vsKeys.x "some name" col(2) ///DSC QA70-7518 v8.Format(strFormatFDF. lpcszFunctionName. _nNumVals) if(_nIndex>=_nNumVals) return PARSE_TOO_MANY_ENTRIES.Format(strFormatFDF. #define IS_CHAR_NUMBER(_ch) ('0' <= (_ch) && (_ch) <= '9') //int parse_arg_str(LPCSTR lpcszArg.0207 LT_XF_COMMAND #define PARSE_KEYVAL_PAIR_MISSING_VAL -5 #define PARSE_KEYVAL_PAIR_MISSING_KEY -6 #define CHECK_PARSE_TOO_MANY_ENTRIES(_nIndex. strFolder. nFolderLocation. nFolderLocation. // if( !strFilePath. } return nFolderLocation.IsFile() ) bExists = okutil_make_file_path(strFilePath. // strFilePath. key=val // a=1 // b=2 // c=col(3) // x="testing" // y=col(5) // z=3 // lpcszArg = 10 col(1) -x "some name" col(2) // lpcszArg = . then search from current index until vsTags has // matching name and use that as new index // return 0. lpcszFunctionName. lpcszFunctionName. vector<string>& vsVals. STR_XF_FILE_EXT).Empty(). but // if token in the form of -name. strFolder). vector<string>& vsKeysSet) .// strFilePath. bool bKeyPartialMatch. lpcszFunctionName.end CPY 2/17/04 XF_RELATED_TO_PATH // given Tag = Val pairs // pass through each token in lpcszArg and assign each token into val.IsFile()) if(!bExists) { strFilePath. return -1. get_origin_path(nFolderLocation. } } // if(!strFilePath. strFolder. bool bKeyCaseSensitive.

swithces are required. int nLastNonswitchedArg=-1.GetLength().//return value char cDash = '-'.GetSize(). StringArray* pvsKeysSet) { /* int nNumKeys = vsKeys. Cannot specify with switch an argument already set with or without a switch.check space int nLen = strTok. bFoundKey = okutil_is_in_list(strKey. if(nLen==1) // && !bSpaceAfterDash) { out_str(ERR_NEED_SPACE_AFTER_DASH). &nKey). //-. string strTok. int nValIndex=-1. but order not important. NULL. StringArray* pvsVals. int nNumValsSet. nTok<nNumTokens. if( strTok[0]==cDash && !is_numeric(strTok) )// have a -Key Val pair. false. and not say -1 { bHaveSwitch=true. nTok++) { strTok = vsTokens[nTok].get key after dash string strKey = strTok. for(int nTok=0. //-. bool bKeyCaseSensitive.The first nLastNonswitchedArg arguments may appear without switches //-. bKeyPartialMatch. string strErr. &vsKeys. ASSERT(nNumKeys == nNumVals). bKeyCaseSensitive.int parse_arg_str(LPCSTR lpcszArg. } //-.GetTokens(vsTokens). int nNumTokens = strArgStr. vector<string> vsTokens. int nKey.Mid(1).After that. StringArray* pvsKeys. string strArgStr = lpcszArg. //-. bool bKeyPartialMatch.GetSize(). 0 . int nNumVals = vsVals. bool bHaveSwitch=false.find key in vsKeys. return PARSE_SPACE_AFTER_DASH. starting from nValIndex bool bFoundKey=false. //false.

now get Val. which should be next token if(++nTok>=nNumTokens) { strErr. CHECK_PARSE_TOO_MANY_ENTRIES(nKey.Format(ERR_VAL_ALREADY_SPECIFIED.Format(ERR_KEY_IS_MISSING_VAL. return PARSE_VARNAME_NOT_FOUND. strKey.Format(ERR_NO_ARG_BY_THAT_NAME. out_str(strErr). and starting another -Key Val pair { strErr. out_str(strErr). } //-. return PARSE_BAD_ARG_ORDER.check and assign if(strTok[0]==cDash && !is_numeric(strTok))// have no value. strKey. // nTok--. return PARSE_VARNAME_NOT_FOUND.GetLength()>0) //already set { strErr. } out_str(strErr). strKey). if(!bFoundKey) { strErr. } vsVals[nKey]=strTok. } else { vsVals[nKey]). return PARSE_KEYVAL_PAIR_MISSING_VAL. } } else // then token is a value .if(bFoundKey && nKey<=nLastNonswitchedArg) { strErr.Format(ERR_KEY_IS_MISSING_VAL. out_str(strErr). } strTok = vsTokens[nTok].Format(ERR_VAL_ALREADY_SPECIFIED.// return PARSE_KEYVAL_PAIR_MISSING_VAL. strKey). vsVals[nKey]). nNumVals). //-. if(vsVals[nKey]. strKey). nNumValsSet++.

Find('a')>=0) . return okutil_parse_arg_str( lpcszArg. } CHECK_PARSE_TOO_MANY_ENTRIES(++nValIndex. nNumValsSet++. vsVals[nValIndex]=strTok. DWORD dwOptions) { string strOptions = lpcszOptions. nNumVals). strTok). bKeyPartialMatch.GetLength()==0 || strOptions. pvsKeys. if(strOptions. out_str(strErr). &vsKeys. } ///end LT_XF_COMMAND static DWORD _list_xf_dword_options_from_str_options(LPCSTR lpcszOptions.Find('a')>=0) dwOptions |= LIST_XF_ARG_NAME. pvsKeysSet). nLastNonswitchedArg++.Find('t')>=0 && strOptions. } ///end ADD_SORT_XF_LIST_OPTIONS ///DSC 6/16/05 ADD_SORT_XF_LIST_OPTIONS //if(strOptions. } } return nNumValsSet. ///end ADD_SORT_XF_LIST_OPTIONS if(strOptions.{ if(bHaveSwitch) { strErr. dwOptions |= LIST_XF_SORT_NAME. ///DSC 6/16/05 ADD_SORT_XF_LIST_OPTIONS // if(strOptions. pvsVals. */ // return okutil_parse_arg_str( lpcszArg.GetLength()>3) // return 0x0.Format(ERR_VAL_IS_MISSING_KEY. if(strOptions.GetLength()==0) { // set defaults dwOptions |= LIST_XF_ARG_NAME. bKeyPartialMatch. return dwOptions. return PARSE_KEYVAL_PAIR_MISSING_KEY. bKeyCaseSensitive).Find('n')>=0) // dwOptions |= LIST_XF_ARG_NAME. bKeyCaseSensitive. &vsVals.

Find('l')>=0) dwOptions |= LIST_XF_LOCATION.// show brief info after xf name return dwOptions.// remove duplicates.Find('h')>=0) dwOptions |= LIST_XF_SHOW_HELP.Find('d')>=0 && strOptions.Find('c')>=0) dwOptions |= LIST_XF_CATEGORY.Find('v')>=0) dwOptions |= LIST_XF_ARG_MORE_INFO. if(strOptions. if(strOptions. if(strOptions.Find('a')>=0) dwOptions |= LIST_XF_ARG_DEFAULTS.Find('u')>=0) dwOptions |= LIST_XF_REFRESH_LIST. printf("a = show argument names\nt = show argument types\nd = show default argument values\nl = show X-Function location\nc = show X-Function category\nn = sort by X-Function name\ns = sort by location. if(strOptions. category. printf("options are combinations of the following letters\n").dwOptions |= LIST_XF_ARG_TYPES.CPY 4/15/05 HELP_XF_LISTING static int _show_list_x_function_options(LPCSTR lpcszOptions) { printf("usage:\nlist xf [name [options]]\n"). } //-. //printf("n = argument names\nt = argument types\nd = default values\nl = X-Function location\nc = X-Function category\n"). if(strOptions.// sort by file 'n'ame ///end ADD_SORT_XF_LIST_OPTIONS if(strOptions.Find('n')>=0) dwOptions |= LIST_XF_SORT_NAME.Find('r')>=0) dwOptions |= LIST_XF_REMOVE_DUPLICATES.// more info about arguments if(strOptions. show only first in list.// show info in help node if(strOptions.Find('s')>=0) // 's'ort dwOptions |= LIST_XF_SORT_DISPLAY. ///DSC 6/16/05 ADD_SORT_XF_LIST_OPTIONS //if(strOptions. X-Function name\nr = remove duplicate same named X- . printf("name can have wildcard * and ?\n").// update map if(strOptions.Find('b')>=0) dwOptions |= LIST_XF_SHOW_BRIEF_INFO.

if(LIST_XF_ARG_TYPES & dwDisplayOptions) { string strSep. for(int jj = 0. printf("\nYou can also see each X-Function by \n\nmy_xf=\n\n"). if(LIST_XF_ARG_DEFAULTS & dwDisplayOptions) { if(!vsVarDefaultVals[jj]. string str= "[". } if(LIST_XF_ARG_NAME & dwDisplayOptions) strArgs += vsVarNames[jj]. //int nIO = nIO.nVal) str += LIST_XF_IN + "] ". with User folder having precedence\nv = show variables information\nu = update xfunction list\n").name.strVal. .io. } //--static string _format_all_arg_str(const vector<string>& vsVarNames. const vector<string>& vsVarDefaultVals. } } } #define LIST_XF_RETURN "return" #define LIST_XF_DESCRIPTION "description" /* #define LIST_XF_IN "in" #define LIST_XF_OUT "out" #define LIST_XF_TYPE "type" #define LIST_XF_DEFAULT "default" static string _make_xf_variable_info_str(TreeNode trV.IsEmpty()) strArgs += "="+vsVarDefaultVals[jj]. //str += " = [".GetSize().Functions. DWORD dwDisplayOptions) { string strArgs="\t". const vector<string>& vsVarTypes. if(IO_INPUT==trV. return 0. jj++) { if(jj > 0) strArgs +=" ". if(LIST_XF_ARG_NAME & dwDisplayOptions) strSep = ":". printf("\nexample:\n\nlist xf my* lctad\n"). return strArgs. const string& vsVarDefaultVals) { //string str = trV. strArgs += vsVarTypes[jj] + strSep. jj < vsVarNames.

string strXFname = lpcszXFname. lpcszXFfile. LTXF_BY_FILENAME | LTXF_LIST)) //-----return false. LPCSTR lpcszKeywords) bool _list_xf(LPCSTR lpcszXFname. DWORD dwDisplayOptions. string strDescription = tree_find_node_by_dataID(trV.strVal.io. "************"). String* p_strOutput=NULL) // /// end ADD_OUTPUT_OPTION_TO_LIST_XF ///end ALLOW_SEARCH_XF_BY_KEYWORDS { XFunction xf. strArgs = _format_all_arg_str(vsVarNames. IDV_VAR_LOCAL_HELP_BRANCH). str += ". DWORD dwDisplayOptions.nVal) str += LIST_XF_OUT + "] ".strVal. } */ ///DSC 2/14/06 QA70-7518 ALLOW_SEARCH_XF_BY_KEYWORDS // bool _list_xf(LPCSTR lpcszXFname. //-----.MakeValidCName(). vsVarTypes. LPCSTR lpcszXFfile. string strArgs. " + LIST_XF_DESCRIPTION + "=" + strDescription. DWORD dwDisplayOptions) /// DSC 2/27/06 ADD_OUTPUT_OPTION_TO_LIST_XF // bool _list_xf(LPCSTR lpcszXFname. " + LIST_XF_DEFAULT + "=" + vsVarDefaultVals.else if(IO_OUTPUT==trV. vsVarTypes). //out_tree(trXF). LTXF_BY_FILENAME)) if(!xf. xf.strip space etc . else if(IO_INOUTPUT==trV. //-.type. LPCSTR lpcszXFfile. LPCSTR lpcszCompositeName. lpcszXFfile. if(!strDescription. vsVarTypes. LPCSTR lpcszCompositeName. TreeNode trXF.io.IsEmpty()) str += ".//trV. LPCSTR lpcszKeywords=NULL. vsVarDefaultVals. vsVarDefaultVals. vsVarDefaultVals.Load(&trXF. LPCSTR lpcszCompositeName. IDV_VAR_HELP_DESCRIPTION.nVal) str += LIST_XF_IN + "/" + LIST_XF_OUT + "] ". //tree_dump(trXF.CPY 5/21/07 LX_CRASHING_WHEN_LISTING_SELF //if(!xf. dwDisplayOptions).description.GetVariables(vsVarNames.Load(&trXF. } strXFname. LPCSTR lpcszXFfile. str += LIST_XF_TYPE + "=" + trV. if(dwDisplayOptions & LIST_XF_ARG_MASK) { vector<string> vsVarNames. return str.

strSearchText)) return false. string strCategory. else strDisplayText = strNameToShow.GetKeywordSearchText(&strDisplayText).string strNameToShow. if((dwDisplayOptions & LIST_XF_PATH_MASK) && lpcszCompositeName) { okutil_separate_composite_name(lpcszCompositeName. string strKeywords = lpcszKeywords. &strCategory). DWORD dwCtrl= FF_REMOVE_PATH | FF_REMOVE_CATEGORY. &nPath. strXFname. ///end MORE_LISTXF_OUTPUT_FORMATTING ///end FIX_DISPLAY_NAME_ORDER //if(LIST_XF_ARG_MORE_INFO & dwDisplayOptions || LIST_XF_SHOW_HELP & dwDisplayOptions) //{ // printf("---------------\n"). . dwCtrl). &nPath. int nPath = ORIGIN_PATH_UNDEF. &strCategory).IsEmpty()) dwCtrl &= ~FF_REMOVE_CATEGORY. if(dwDisplayOptions & LIST_XF_LOCATION) dwCtrl &= ~FF_REMOVE_PATH. } if(LIST_XF_SHOW_BRIEF_INFO & dwDisplayOptions) strDisplayText = strNameToShow + "\t" + strCategory + ": " + strDisplayText. // search category string also string strSearchText = strXFname + "\t" + strCategory + ": ". strCategory. okutil_separate_composite_name(lpcszCompositeName. //} ///DSC 2/14/06 QA70-7518 ALLOW_SEARCH_XF_BY_KEYWORDS string strDisplayText. strNameToShow = okutil_composite_name_from_components(nPath. strSearchText += xf. } ///DSC 6/15/05 FIX_DISPLAY_NAME_ORDER ///DSC 7/13/05 QA70-7518 MORE_LISTXF_OUTPUT_FORMATTING // strNameToShow += strXFname.Compare("*")!=0)// filter based on keywords { // for now assume one keyword strKeywords.MakeLower(). if((dwDisplayOptions & LIST_XF_CATEGORY) && !strCategory. if(lpcszKeywords && strKeywords. if(!is_str_match_begin_of_word(strKeywords .

trV.SetAttribute(STR_LABEL_ATTRIB. strReturnType. trV.name.type. str1. trV.description. /// end ADD_OUTPUT_OPTION_TO_LIST_XF // if(lpcszKeywords && strKeywords. vsVarDefaultVals.valdata.strVal.Remove(). strDisplayText). ///end ALLOW_SEARCH_XF_BY_KEYWORDS ///DSC 7/28/05 SUPPORT_HELP_XF_IF_NOT_LT // if(LIST_XF_ARG_MORE_INFO & dwDisplayOptions) // show variable list if( (LIST_XF_ARG_MORE_INFO & dwDisplayOptions) && 1==trXF. //trV. vector<string> vsVarNames. trV. string str.Remove(). .UsageCtxt.comboString.Remove().nVal) // show variable list ///end SUPPORT_HELP_XF_IF_NOT_LT { /* Tree trVars. str). //trV.LabTalk. foreach(TreeNode trV in trVars. else printf("%s\n".help.context.strVal.Data.Remove().GetVariables(vsVarNames.Children) { str = _make_xf_variable_info_str(trV.Compare("*")!=0) // return true.vars.Remove(). vsVarTypes). vsVarDefaultVals. trVars = trXF. int ii=0.name. trV. trV.io. string strName = trV. string strCombo = trV. if(p_strOutput) *p_strOutput = strDisplayText. xf. trV./// DSC 2/27/06 ADD_OUTPUT_OPTION_TO_LIST_XF // printf("%s\n".Remove(). strDisplayText).Remove(). vsVarTypes. vsVarDefaultVals[ii++]).Remove().

. TreeNode trtemp = tree_check_get_node(trV.SetAttribute(STR_LABEL_ATTRIB.SetAttribute(STR_LABEL_ATTRIB. string strVarsInfo = xf. .strVal.GetTokens(vs. //.CompareNoCase("void")!=0) strTitle += " " + LIST_XF_RETURN + "=" + strReturnType.strVal = str. for(int ii=0. string strReturnType..GetReturnType(strReturnType).GetSize(). if(strCombo. xf. ii++) { str1 = (string)"n" + (1+ii). trV. str1 = ii.IsEmpty() && strReturnType.SetAttribute(STR_LABEL_ATTRIB. if(!strReturnType.comboString. vector<string> vs.xFdescription. trtemp. strAttr).Remove().SetAttribute(STR_LABEL_ATTRIB.GetVarsHelpInfo(). strNameToShow).CompareNoCase("0|1")==0) strCombo = "false|true". } } else { } } */ trV. strAttr+=str. trtemp.strVal = vs[ii]. str1).CompareNoCase("1|0")==0) strCombo = "true|false". str1). ii<vs. trV.IsEmpty()) // need to convert into a branch with expanded listing { string strAttr = strName. strAttr+= " = ". '|'). if(!strCombo. trV.if(strCombo[0]=='|')// starts with | means editable. strName). //trVars. if(strCombo. string strTitle = strNameToShow. strCombo. so dont show list strCombo="". ///DSC 7/19/05 MAKE_GET_RETURN_TYPE_PUBLIC string strDescription = trXF.

if(p_strOutput) *p_strOutput += "\n" + strVarsInfo.strVal. ///end ALLOW_SEARCH_XF_BY_KEYWORDS if(LIST_XF_SHOW_HELP & dwDisplayOptions) { string strHelp. //else if (ORESLANG_GERMAN == nresLang) // strHelp = trXF. strSeeAlso. ///end ALLOW_SEARCH_XF_BY_KEYWORDS /// DSC 2/27/06 ADD_OUTPUT_OPTION_TO_LIST_XF // printf("%s\n". printf("\n"). LT_get_var("@RL".summaryj. out_tree(trVars).help. strExample. /// end ADD_OUTPUT_OPTION_TO_LIST_XF } ///DSC 2/14/06 QA70-7518 ALLOW_SEARCH_XF_BY_KEYWORDS // else // printf("%s %s\n". .if(!strDescription.SetAttribute(STR_LABEL_ATTRIB.strVal. strTitle). strVarsInfo). &resLang).strVal. strVarsInfo). strNameToShow. strArgs). */ ///DSC 2/14/06 QA70-7518 ALLOW_SEARCH_XF_BY_KEYWORDS // printf("%s\n". int nresLang = nint(resLang). strTitle). if (ORESLANG_JAPANESE == nresLang) strHelp = trXF. strRef. /* trVars.help. else printf("%s\n".IsEmpty()) // strHelp = trXF.helpj.IsEmpty()) strTitle += " " + LIST_XF_DESCRIPTION + "=" + strDescription. /* ///DSC 8/5/05 QA70-7518 ADD_HELP_J_G_TO_XF double resLang = 0. //if(strHelp. //if (ORESLANG_JAPANESE == nresLang) // strHelp = trXF.strVal.helpg.

else printf("%s\n". printf("%s\n". DWORD dwCtrl)//NULL. /// end ADD_OUTPUT_OPTION_TO_LIST_XF } return true. NULL.example. LPCSTR lpcszKeywords)//NULL. */ /* strExample = trXF. ORIGIN_PATH_UNDEF. strExample).strVal. LIST_XF_HELP_INFO.help.help.strVal. } */ ///end ADD_HELP_J_G_TO_XF strHelp = xf.ref. printf("---------------\n"). } /// DSC 2/27/06 ADD_OUTPUT_OPTION_TO_LIST_XF // /// DSC 2/14/06 QA70-7518 ALLOW_SEARCH_XF_BY_KEYWORDS // // int list_x_functions(LPCSTR lpcszName. printf("%s\n".help. printf("---------------\n\n\n").strVal. ORIGIN_PATH_UNDEF. NULL. 0). strHelp). NULL . strRef).strVal. printf("%s\n". if(p_strOutput) *p_strOutput += "\n" + strHelp. strSeeAlso).else if (ORESLANG_GERMAN == nresLang) strHelp = trXF. printf("%s\n".GetHelpInfo(true.IsEmpty()) strHelp = trXF. LPCSTR lpcszOptions. 0x0 // int list_x_functions(LPCSTR lpcszName. strHelp). /// DSC 2/27/06 ADD_OUTPUT_OPTION_TO_LIST_XF // printf("%s\n". LPCSTR lpszCategory. int nLocation. if(strHelp.seeAlso.help. strHelp).summaryg. //printf("\n\n\n").summary. if(!strHelp. 0x0. strSeeAlso = trXF.strVal.IsEmpty()) { printf("%s%s:\n".help. int nLocation. DWORD dwCtrl. strRef = trXF. LPCSTR lpcszOptions. LPCSTR lpszCategory. strXFname).

// init to all usage contexts DWORD* pdwUsgCtxts=NULL.CPY 4/15/05 HELP_XF_LISTING // int nLen = lstrlen(lpcszName). NULL // /// end ADD_OUTPUT_OPTION_TO_LIST_XF /// end ALLOW_SEARCH_XF_BY_KEYWORDS { //---. // if(nLen >= 1 && *lpcszName == '?' || (*lpcszName == '-' && (lpcszName[1] == '?' || lpcszName[1] == 'h' || lpcszName[1] == 'H'))) // return _show_list_x_function_options(lpcszOptions). DWORD dwCtrl. ///DSC 12/15/06 LIST_XF_IF_NOT_GRAPHIC_OBJECT_EVENT . int nNumUsgCtxts = 1. NULL. // // User Folder|Group Folder|System Path|All // // Need to convert All to to internal enum value ORIGIN_PATH_UNDEF // if(3==nLocation) // nLocation=ORIGIN_PATH_UNDEF. NULL."?") == 0) { FUNC_STR pfn = Project. ///DSC 8/2/05 use Help xfname instead // //-. DWORD dwDisplayOptions =_list_xf_dword_options_from_str_options(lpcszOptions. TRUE). ///DSC 2/9/06 QA70-7835 ORIGIN_PATH_PROJECT_INSERTED lx converts nLocation from combo list of locations to enum ORIGIN_PATH_TYPE // // "All" is index 3 in combo string of variable "location" in tree view of xfunction "listxf". ///end vector<string> vsFiles. if(pfn) pfn(lpcszName). } //---string strName = lpcszName.FindFunction("XFShowHelp". added nXFUsage DWORD nXFUsage. StringArray* p_saOutput)//NULL. 0). 0x0. //-. return 0. LPCSTR lpcszKeywords.get all x-functions ///DSC 7/28/05 SUPPORT_HELP_XF_IF_NOT_LT // int nXFUsage = IDXF_USGCTXT_LABTALK. int nLocation.CPY 9/15/05 SHOW_XF_HELP_IN_TX if(lstrcmp(lpcszOptions. NULL.int list_x_functions(LPCSTR lpcszName. LPCSTR lpszCategory. vsNames. LPCSTR lpcszOptions. // if(0==dwDisplayOptions) // dwDisplayOptions = LIST_XF_ARG_NAME. ORIGIN_PATH_UNDEF. //CPY 4/5/05 XF_LT_CHECK_USAGE_CONTEXT.

SUPPORTFILE_XF. "xfname -d" in script window. 1). &vsFiles. if(LIST_XF_REMOVE_DUPLICATES & dwDisplayOptions) ///DSC 10/11/2005 QA70-8167 FIND_FILES_FROM_SPECIFIED_LOCATION ///nNumFiles = okutil_find_files(&vsNames. //if(LIST_XF_SHOW_DUPLICATES & dwDisplayOptions) ///DSC 7/28/05 SUPPORT_HELP_XF_IF_NOT_LT // if(LIST_XF_REMOVE_DUPLICATES & dwDisplayOptions) // nNumFiles = okutil_find_files(&vsNames. SUPPORTFILE_XF. ORIGIN_PATH_PREFER_USER ///end FIND_FILES_FROM_SPECIFIED_LOCATION else nNumFiles = okutil_find_files_from_map(&vsNames. &vsFiles. int nNumFiles. 0x0. SUPPORTFILE_XF. pdwUsgCtxts. &nXFUsage. SUPPORTFILE_XF.// = NOT graphic object events ///DSC 12/15/06 LIST_XF_IF_NOT_GRAPHIC_OBJECT_EVENT Max requests to allow "fitNL -d". 1). NULL. &vsFiles. 0x0.if( !(LXF_NO_LT_CHECK & dwCtrl) ) { nXFUsage = IDXF_USGCTXT_LABTALK. ORIGIN_PATH_UNDEF). } ///end SUPPORT_HELP_XF_IF_NOT_LT if(LIST_XF_REFRESH_LIST & dwDisplayOptions) { NOTIFY_XF_FILE_CHANGE } ///DSC 6/16/05 ADD_SORT_XF_LIST_OPTIONS why FF_REMOVE_DUPLICATE?? // int nNumFiles = okutil_find_files(&vsNames. // else // nNumFiles = okutil_find_files(&vsNames. nNumUsgCtxts). &vsFiles. CP says list all except xf's with graphic object events checked pdwUsgCtxts = &nXFUsage. 1). dwDisplayOptions). ///end SUPPORT_HELP_XF_IF_NOT_LT ///end ADD_SORT_XF_LIST_OPTIONS /// DSC 3/21/06 SORT_INSIDE_FIND_FILES // ///DSC 6/16/05 ADD_SORT_XF_LIST_OPTIONS // if(LIST_XF_SORT_MASK & dwDisplayOptions) // sort_composite_name_vs(vsNames. 0. pdwUsgCtxts. 1). NULL. &vsFiles. FF_REMOVE_DUPLICATE. even though these are not IDXF_USGCTXT_LABTALK. &nXFUsage. //CPY 4/5/05 XF_LT_CHECK_USAGE_CONTEXT. nNumUsgCtxts. // . added nXFUsage //nXFUsage = -1*IDXF_USGCTXT_GO. nNumFiles = okutil_find_files_from_map(&vsNames. &vsFiles. NULL. FF_REMOVE_DUPLICATE. &nXFUsage. FF_REMOVE_DUPLICATE. . SUPPORTFILE_XF. SUPPORTFILE_XF. pdwUsgCtxts. NULL.

// ///end ADD_SORT_XF_LIST_OPTIONS /// SORT_INSIDE_FIND_FILES string strCategory; if(lpszCategory) strCategory = lpszCategory; /// special case lc if( (LXF_FROM_XF_LC & dwCtrl) && strName.CompareNoCase("")==0 && strCategory.CompareNoCase("")==0 ) // lc list categories { vector<string> vsCats; for(int ii=0; ii<nNumFiles; ii++) { int nPathType = ORIGIN_PATH_UNDEF; string strCat; string strMatchingName = okutil_separate_composite_name(vsNames[ii], &nPathType, &strCat); /// Hong 05/23/07/ QA80-9815 v8.0624 FIX_LOCATION_NOT_WORK_FOR_LIST_CATEGORY if(ORIGIN_PATH_UNDEF!=nLocation && nPathType!=nLocation) continue; /// end FIX_LOCATION_NOT_WORK_FOR_LIST_CATEGORY if(-1==vsCats.Find(strCat))// not found vsCats.Add(strCat); } vsCats.Sort(); for(ii=0; ii<vsCats.GetSize(); ii++) { /// DSC 2/27/06 ADD_OUTPUT_OPTION_TO_LIST_XF // printf("%s\n", vsCats[ii]); if(p_saOutput) p_saOutput->Add(vsCats[ii]); else printf("%s\n", vsCats[ii]); /// end ADD_OUTPUT_OPTION_TO_LIST_XF } } return 0;

string strMatchingName; //-- find those that match lpcszName vector<string> vsCats; for(int ii=0; ii<nNumFiles; ii++) { // if( vsNames[ii].Match(lpcszName) ) ///DSC 6/16/05 ADD_SORT_XF_LIST_OPTIONS

// strMatchingName = GetFileName(vsFiles[ii], true); // if( strMatchingName.Match(lpcszName) ) // _list_xf(strMatchingName, vsFiles[ii], vsNames[ii], dwDisplayOptions); ///DSC 6/29/05 QA70-7518 LISTXF_OXF // strMatchingName = okutil_separate_composite_name(vsNames[ii], NULL, NULL); int nPathType = ORIGIN_PATH_UNDEF; string strCat; string strMatchingName = okutil_separate_composite_name(vsNames[ii], &nPathType, &strCat); //string strCategory; //if(lpszCategory) // strCategory = lpszCategory; if(!strCategory.IsEmpty() && !strCat.Match(strCategory)) continue; if(ORIGIN_PATH_UNDEF!=nLocation && nPathType!=nLocation) continue; ///end QA70-7518 LISTXF_OXF strMatchingName.MakeValidCName(); //-- strip space etc if( strMatchingName.Match(lpcszName) ) { string strFullpath; okutil_find_file_from_composite_name(&strFullpath, vsNames[ii], SUPPORTFILE_XF); /// DSC 2/14/06 QA70-7518 ALLOW_SEARCH_XF_BY_KEYWORDS // _list_xf(strMatchingName, strFullpath, vsNames[ii], dwDisplayOptions, lpcszKeywords); /// DSC 2/27/06 ADD_OUTPUT_OPTION_TO_LIST_XF string strOutput, *pstrOutput; if(p_saOutput) pstrOutput=&strOutput; else pstrOutput=NULL; _list_xf(strMatchingName, strFullpath, vsNames[ii], dwDisplayOptions, lpcszKeywords, pstrOutput ); if(p_saOutput) p_saOutput->Add(strOutput); /// end ADD_OUTPUT_OPTION_TO_LIST_XF /// end ALLOW_SEARCH_XF_BY_KEYWORDS } ///end ADD_SORT_XF_LIST_OPTIONS

} } return 0;

///DSC 8/18/05 AUTOCOMPLETE_XF_CMD_LINE_ARGUMENTS //int get_remaining_xf_arg_list(StringArray* p_saRemainingArgList, XFunction& xf, LPCSTR lpcszCmdLineSoFar) //int get_remaining_xf_arg_list(StringArray* p_saRemainingArgList, LPCSTR lpcszXFName, LPCSTR lpcszCmdLineSoFar) //pass in cmd line and get remaining unassigned arguments... /// YuI 10/10/05 QA70-8158 GENERAL_X_STRING_CONVENTION_TO_ACCESS_OBJECTS // #define XF_VARIABLE_ASSIGNMENT_STR ":=" /// end GENERAL_X_STRING_CONVENTION_TO_ACCESS_OBJECTS int get_remaining_xf_arg_list(StringArray* p_saRemainingArgList, LPCSTR lpcszCmdLineSoFar) { // get xf object // find xf string strCmdSoFar = lpcszCmdLineSoFar; strCmdSoFar.TrimLeft(); // vector<string> vsTokens; // int nNumToks = strCmdSoFar.GetTokens( vsTokens ); // if(nNumToks==0) // return -1;// no xf in cmd // string strXFName = vsTokens[0]; // get xf int nSpace = strCmdSoFar.Find(" "); string strXFName = strCmdSoFar.Left(nSpace); if(strXFName.IsEmpty()) return -1; // get arguments strCmdSoFar = strCmdSoFar.Mid(strXFName.GetLength()+1);//skip the xf name strCmdSoFar.TrimLeft(); string strFullpath; okutil_find_file_from_composite_name(&strFullpath, strXFName, SUPPORTFILE_XF); XFunction xf; TreeNode trXF; if(!xf.Load(&trXF, strFullpath, LTXF_BY_FILENAME)) return -1; // it would be faster to compute xf name and XFunction object outside function... // then pass XFunction object reference into this function

SetSize(nNumVars). } } } return nNumRemaining. false. &vsVarParsedValues.IsEmpty() if value can be empty string "" // if(vsVarParsedValues. } ///end AUTOCOMPLETE_XF_CMD_LINE_ARGUMENTS string get_reg_key_name(int nEventID) { switch(nEventID) { case IDXF_USGCTXT_WKS_SEL_CHANGE: return "OnWksSelectionChange". //int nNum = parse_arg_str(lpcszCmdLineSoFar. vsVarTypes). true. &vsVarParsedValues.GetSize(). &vsVarNames.GetSize(). ii < vsVarParsedValues.GetVariables(vsVarNames.IsEmpty())//then not set { if(vsVarDefaultVals[ii].// empty double quotes "" so user can see empty string. int nNumVars = vsVarNames. xf. vsVarDefaultVals. vsVarDefaultVals. &vsVarNamesSet). true. vsVarParsedValues. &vsVarNames. false. case IDXF_USGCTXT_WKS_SEL_TRACKING: . //int nNum = parse_arg_str(lpcszCmdLineSoFar. p_saRemainingArgList->Add(str). if(nNum>=0)//no errors. // now parse vector<string> vsVarParsedValues.IsEmpty()) vsVarDefaultVals[ii]="\"\"". string str = vsVarNames[ii] + XF_VARIABLE_ASSIGNMENT_STR + vsVarDefaultVals[ii]. &vsVarParsedValues.// get xf variables vector<string> vsVarNames. int nNumRemaining=0. vsVarTypes. vsVarNamesSet. including missing values { for(int ii=0. nNumRemaining++. true. &vsVarNamesSet).IsEmpty()) if(vsVarNamesSet[ii].SetSize(nNumVars). ii++) { // cant use vsVarParsedValues. &vsVarNames. false). vsVarNamesSet. int nNum = parse_arg_str(strCmdSoFar.

} } //---- ///end QA70-7500 EVENT_ORGANIZER_DLG ///end LIST_XF string op_get_graph_tag_name(int nIndex) { string str = "Graph". .return "OnWksSelectionTracking".Load(vsFiles[ii])) continue.Output. vector<uint>& vnOldIndices) { Tree trXF. true). use -1 to just get the tagName prefix str += ++nIndex. //--. vector<string>& vsResultNames. ////-------. } TreeNode op_get_optional_tables(TreeNode& trGUI) { return trGUI.nVal) continue. if(nIndex < 0) return str. if (!trNode.Add(vsNames[ii]).Add(ii). return str. create new composite name list by filtering UsageContext check for given XF tree node ID void xf_filter_names(const vector<string>& vsNames. ii < vsNames.CPY ML 3/24/2005 CONTEXT_MENU_X_FUNCS // given composite-name/file-name pair. ii++) { if (!trXF. int nDataID. if(vnOldIndices) vnOldIndices. } error_report("get_reg_key_name found invalid event ID").CPY 12/31/04 PICTURE_IN_REPORT_IN_BASE_CLASS. } return "NotDefined".SetSize(0). TreeNode trNode. const vector<string>& vsFiles. if(vnOldIndices) vnOldIndices. vsResultNames.IsValid() || !trNode. for(int ii = 0. nDataID.Create. trNode = tree_get_node_by_dataid(trXF.GetSize().

CPY 5/4/2006 MOVE_XF_ERR_TO_THROW_MACRO // I also changed all codes in this file that uses this function to error_report /* //Error output in XF void xf_error_out(string strErr) { out_str(""). strErr. } //count missing values in vector int vec_trim_missing(vector& vIn) { int nSize = vIn. wks1.Calibration.GetPage(). } int check_same_worksheet(Worksheet& wks1. return nSize .vIn.GetSize(). Worksheet& wks2) { string strwks1. strwks2. return trGUI. out_str(""). dMax.GetPage(). strErr.GetName(strwks1). dMax). if ( strwks1 != strwks2 ) .} ///Joe 8/28/06 MOVE_GET_X_FROM_Y_OR_GET_Y_FROM_X_TO_CALIBRATION TreeNode op_get_calibration_table(TreeNode& trGUI) { ///Joseph 09/18/06 MOVE_CALIBRATION_TABLE_OUT_OUTPUT //return trGUI. ///End MOVE_CALIBRATION_TABLE_OUT_OUTPUT } ///End MOVE_GET_X_FROM_Y_OR_GET_Y_FROM_X_TO_CALIBRATION //---.Calibration.Output.GetSize().GetName(strwks2). return (dMin == dMax) ? true : false. vIn.Trim().Write( WRITE_MESSAGE_BOX). } */ //check if all elements of vector are equal bool vec_is_equal(const vector& vIn) { double dMin. wks2.GetMinMax(dMin. vIn.Write(WRITE_SCRIPT_WINDOW).

double dMargin. . RoundLimits(&dMin. if(pIncre) *pIncre = log10(dMax / dMin) / *pSteps. } //end CORRECT_SPAN_AXIS_AS_FITX_WHEN_GET_INPUT_FROM_WKS //---. see #8904 for more details dMin -= dMargin.08 here. bool bRound) { if(dMin > dMax) return false.GetIndex() ) return DIFF_WKS. if(bLogScale) { double dMintemp = dMin. } /// Iris 11/21/06 CORRECT_SPAN_AXIS_AS_FITX_WHEN_GET_INPUT_FROM_WKS static bool _get_span_full_axis_min_max_value(double& dMin. dMax = 10^( log10(dMaxtemp) + dtemp ). double dInc. } else { double dtemp = ( dMax . int nPoints = 50) { if(dMin > dMax) return false. bool bLogScale. if(pIncre && !pSteps) return false. double* pIncre.dtemp ). &dInc. double& dMax. else if ( wks1. double dMargin = (dMax .return DIFF_WKB.08.dMin) * 0. dMax += dMargin. else return SAME_WKS.dMin ) * dMargin. return true. dMin = 10^( log10(dMintemp) .GetIndex() != wks2. dMaxtemp = dMax. //For 0. double dtemp = dMargin * log10(dMaxtemp/dMintemp). double& dMax.CPY 10/29/2007 QA70-10599 NLFIT_PREVIEW_UPDATE_CLEANUP /* /// Cheney 2007-7-16 SHOULD_GENERATE_X_DATA_UNIFORMLY_SPACED_IN_LOG_SPACE_WHEN_LOG_TYPE bool get_fitted_curve_min_max(double& dMin. &dMax. int* pSteps. nPoints).

double* pz1. dMax = _log_to_linear(dMax). double& y2. double* pz2) ///end NEED_RESCALE_FOR_3D_FITTINGS { . double& dMax. double& x2. double dMargin. DWORD dwRules. dMax += dtemp.} double get_min_max_inc(double& dMin. // something is wrong return log10(x). dMax = _linear_to_log(dMax). double& y1. &dMax. nSteps). if(bRound && pIncre && pSteps) RoundLimits(&dMin.dMin) / *pSteps. tt). &dMax. dMin -= dtemp. return dInc.dMin -= dtemp. dMax += dtemp. pIncre. double dInc = 0. dMax. if(nSteps) RoundLimits(&dMin. } ///end SHOULD_GENERATE_X_DATA_UNIFORMLY_SPACED_IN_LOG_SPACE_WHEN_LOG_TYPE */ static double _log_to_linear(double x) { if(x <=0) return -10. } double dtemp = (dMax . if(dMin > dMax) { double tt. double& y2. } ///Arvin 11/22/07 NEED_RESCALE_FOR_3D_FITTINGS //bool get_data_range_xy_scale(DataRange& dr. double& x2. } static double _linear_to_log(double x) {return 10^x. DWORD dwRules) bool get_data_range_xy_scale(DataRange& dr. double& x1. } return true. if(pIncre) *pIncre = (dMax . double& x1. &dInc. SWAP(dMin. *pSteps). double& y1.dMin) * dMargin. bool bLogScale. if(bLogScale) dMin = _linear_to_log(dMin). int nSteps) { if(bLogScale) dMin = _log_to_linear(dMin).

nRet).rZ2.rX1)) { string strErr.rZ1.GetPlottingScale(dwRules. double tt. if(z1 > z2) SWAP(z1.IsValid()) return false. y2 = st. y1 = st.z2. if(pz1) *pz1 = z1.tt). /// Hong 04/09/08 QA80-11395 FIX_FIT_CURVE_IN_REPORT_FAIL_RESCALE_WHEN_NO_OVERLAP //if(dp. z2 = st.GetAxesScaleOverlap(&dfx.if(!dr) return false.Format("GetPlottingScale failed %d". // dwRules = DRR_GET_DEPENDENT | DRR_NO_FACTORS.tt).y2. if(pz2) *pz2 = z2.end NLFIT_PREVIEW_UPDATE_CLEANUP ///Arvin 11/09/07 NLFIT_REPORT_GRAPH_NEED_RESCALE_WHILE_PART_GRAPH_SELECTION_AS_INPUT bool is_plot_requir_rescale(DataPlot& dp. &dfy). dfy. ///end NEED_RESCALE_FOR_3D_FITTINGS } return true. int nRet = dr. int nOption) { if(!dp.rX2. DRPLOTTINGSCALE st = {0}. double dfx. error_report(strErr).rY1.tt). &st). //---. } x1 = st. &dfy) < 0) // return false.x2. if(nRet != 0 || is_missing_value(st. return false. x2 = st. } if(y1 > y2) { SWAP(y1.rY2. strErr.GetAxesScaleOverlap(&dfx.rX1. . } ///Arvin 11/22/07 NEED_RESCALE_FOR_3D_FITTINGS double z1 = st. if(x1 > x2) { SWAP(x1. int nRet = dp.

bool bSetCustomInput) ///end PREVIEW_RANGE_SHOULD_BE_SOUCE_GRAPH_OR_DATA_RANGE_WHEN_RANGE_TYPE_IS_SPAN_T O_FULL_AXIS_RANGE { ///Cheney 2006-11-22 SHOULD_ALWAYS_USE_INPUT_Y_TO_GET_RESIDUAL //switch(fitOptions. if( (nOption & RESCALE_SHRINK_X) && (dfx < 0. bool bSetCustomInput) ///end SHOULD_ALWAYS_USE_INPUT_Y_TO_GET_RESIDUAL */ ///Arvin 04/12/07 v8. const FitResultCurveDataOptions& fitOptions. const FitResultCurveDataOptions& fitOptions. vector& vFit. so I choose 1. const GraphLayer& gl. double* pdAxisMax.3 as threshold in this case if( (nOption & RESCALE_SHOW_ALL) && ((dfx > 1. bool bSetCustomInput) bool get_data_by_fitted_curve_options(const vector& vData.3) || (dfy > 1.8) ) bNeedRescale = true. bool bSortFitData. const FitResultCurveDataOptions& fitOptions. } ///end NLFIT_REPORT_GRAPH_NEED_RESCALE_WHILE_PART_GRAPH_SELECTION_AS_INPUT /// Iris 11/21/06 CORRECT_SPAN_AXIS_AS_FITX_WHEN_GET_INPUT_FROM_WKS /* ///Cheney 2006-11-22 SHOULD_ALWAYS_USE_INPUT_Y_TO_GET_RESIDUAL //bool get_data_by_fitted_curve_options(const vector& vData. /// end FIX_FIT_CURVE_IN_REPORT_FAIL_RESCALE_WHEN_NO_OVERLAP bool bNeedRescale = false.if ( nRet < 0 ) return false.3)) ) bNeedRescale = true.0599 PREVIEW_RANGE_SHOULD_BE_SOUCE_GRAPH_OR_DATA_RANGE_WHEN_RANGE_TYPE_IS_SPAN_T O_FULL_AXIS_RANGE by cp's suggestion //bool get_data_by_fitted_curve_options(const vector& vData. const GraphLayer& gl.DataType : FIT_CURVE_SAME_AS_DATA. const FitResultCurveDataOptions& fitOptions. double* pdAxisMin. vector& vFit. double* pdAxisMin. vector& vFit. vector& vFit. bool bSortFitData. switch(nDataType) ///end SHOULD_ALWAYS_USE_INPUT_Y_TO_GET_RESIDUAL . bool bSortFitData) bool get_data_by_fitted_curve_options(const vector& vData. return bNeedRescale.DataType) int nDataType = bSetCustomInput? fitOptions. if( (nOption & RESCALE_SHRINK_Y) && (dfy < 0. double* pdAxisMax.08. bool bSortFitData. //Two side margin are 0. double* pdAxisMin. double* pdAxisMax. 
if ( GORR_NO_OVERLAP == nRet ) return true.8) ) bNeedRescale = true.

FIT_CURVE_UNIFORM_LOG == fitOptions. ////after call this func. &dInc. &dMax. dInc. /// Cheney 2007-7-16 SHOULD_GENERATE_X_DATA_UNIFORMLY_SPACED_IN_LOG_SPACE_WHEN_LOG_TYPE switch(fitOptions..0.dMin ) * fitOptions. //return directly case FIT_CURVE_UNIFORM_LINEAR: case FIT_CURVE_UNIFORM_LOG: break. will get large error //if(dAbsoluteRangeMargin > 0. dMax.0. MATREPL_TEST_LESSTHAN | MATREPL_TEST_EQUAL). if(FIT_CURVE_UNIFORM_LOG == fitOptions. fitOptions.Range) { case FIT_CURVE_RANGE_MARGIN: /// Cheney 2007-7-16 SHOULD_GENERATE_X_DATA_UNIFORMLY_SPACED_IN_LOG_SPACE_WHEN_LOG_TYPE //double dAbsoluteRangeMargin = ( dMax .N .{ case FIT_CURVE_SAME_AS_DATA: vFit = vData. //dMin -= dAbsoluteRangeMargin. } double dMin.N). vTemp./// Iris 11/21/06 CORRECT_SPAN_AXIS_AS_FITX_WHEN_GET_INPUT_FROM_WKS int nSteps = fitOptions.GetMinMax(dMin.CPY 7/15/06 LR_FIT_USE_SOURCE_X_DATA_SHOULD_SORT_FIRST if(bSortFitData && vFit. &dInc.Sort(). //dMax += dAbsoluteRangeMargin. dMax).DataType). ///end IF_LOG_SCALE_SHOULD_REPLACE_ALL_VALL_LESS_AND_EQUAL_0 //RoundLimits(&dMin.RangeMargin / 100. if input x is small: 1e-10.N).GetMinMax(dMin. ////if use log scale.. fitOptions.) /////end COMPARE_TO_O75_SHOULD_NOT_MODIFY_MINX_AND_MAXX_VAL //RoundLimits(&dMin.Replace( 0.CPY 10/29/2007 QA70-10599 NLFIT_PREVIEW_UPDATE_CLEANUP //get_fitted_curve_min_max(dMin. dmin will become 0. vector vTemp. /////Cheney 2007-7-13 COMPARE_TO_O75_SHOULD_NOT_MODIFY_MINX_AND_MAXX_VAL ////otherwise.. //--return true.RangeMargin / 100. NANUM.GetSize()>0 ) vFit. //--. &dMax. . dMax). ///Cheney 2007-9-19 QA70-10399-P2 IF_LOG_SCALE_SHOULD_REPLACE_ALL_VALL_LESS_AND_EQUAL_0 //vData.1. 1e-9. default: break.DataType) vTemp./// Iris 11/21/06 CORRECT_SPAN_AXIS_AS_FITX_WHEN_GET_INPUT_FROM_WKS //---. vTemp = vData. dMax. fitOptions.

dMax. ///end SHOULD_GENERATE_X_DATA_UNIFORMLY_SPACED_IN_LOG_SPACE_WHEN_LOG_TYPE } ///end CORRECT_SPAN_AXIS_AS_FITX_WHEN_GET_INPUT_FROM_WKS break. fitOptions.X. default: break.08. FIT_CURVE_UNIFORM_LOG == fitOptions.0599 PREVIEW_RANGE_SHOULD_BE_SOUCE_GRAPH_OR_DATA_RANGE_WHEN_RANGE_TYPE_IS_SPAN_T O_FULL_AXIS_RANGE by cp's suggestion else if(pdAxisMin && pdAxisMax) { dMin = *pdAxisMin.Min. dMax. //if( pdAxisMax ) dMax = *pdAxisMax. //---. dMax = fitOptions.N). 0.X. dMax = *pdAxisMax. dMax. dMax.From.RangeMargin / 100. case FIT_CURVE_CUSTOM: dMin = fitOptions. case FIT_CURVE_SPAN_AXIS: /// Iris 11/21/06 CORRECT_SPAN_AXIS_AS_FITX_WHEN_GET_INPUT_FROM_WKS //if( pdAxisMin ) dMin = *pdAxisMin. //---///end SHOULD_GENERATE_X_DATA_UNIFORMLY_SPACED_IN_LOG_SPACE_WHEN_LOG_TYPE break. if(gl) { dMin = gl. } ///end PREVIEW_RANGE_SHOULD_BE_SOUCE_GRAPH_OR_DATA_RANGE_WHEN_RANGE_TYPE_IS_SPAN_T O_FULL_AXIS_RANGE else { /// Cheney 2007-7-16 SHOULD_GENERATE_X_DATA_UNIFORMLY_SPACED_IN_LOG_SPACE_WHEN_LOG_TYPE //_get_span_full_axis_min_max_value(dMin.0).To. } ///Arvin 04/12/07 v8.DataType). FIT_CURVE_UNIFORM_LOG == fitOptions. break.DataType). dMax = gl. fitOptions. FIT_CURVE_UNIFORM_LOG == fitOptions.CPY 10/29/2007 QA70-10599 NLFIT_PREVIEW_UPDATE_CLEANUP //get_fitted_curve_min_max(dMin. .DataType.Max. get_min_max_inc(dMin.get_min_max_inc(dMin.

vFit. // Log transform of inc=(endbegin)/(npts-1) vFit.Data(log10(dMin).jj < vecTemp. bool bSort = true. vector<double> vecTemp.ii < vec. log10(dMax). bool bRemoveRepeat = true) { if( NULL == vec || NULL == str) return -1.dMin) / nSteps. vFit = 10^vFit. jj. int iSize. } else { ///Cheney 2007-7-16 SHOULD_GENERATE_X_DATA_UNIFORMLY_SPACED_IN_LOG_SPACE_WHEN_LOG_TYPE if(nDataType == FIT_CURVE_UNIFORM_LOG) { dInc = log10(dMax / dMin) / nSteps. } else ///end SHOULD_GENERATE_X_DATA_UNIFORMLY_SPACED_IN_LOG_SPACE_WHEN_LOG_TYPE { dInc = (dMax . for(ii = 0. string& str. /// Kevin 08/29/05 MODIFY_VECTOR_TOSTRING /// Kevin 08/18/05 ADD_VECTOR_TO_STRING /*int VectorToStr(const vector<double> &vec. dInc).GetSize(). dInc). jj++) .SetSize(1). dMax.} if(0 == nSteps) ///Jasmine 05/04/08 USE_MIN_VALUE_IF_FIT_CURVE_POINT_IS_ONE { vFit. int ii. if( bRemoveRepeat ) { vecTemp.Add( vec[0] ).GetSize().Data(dMin. } } } return true. vFit[0] = dMin. ii++) for(jj = 0.

bool bRemoveRepeat. vector<string> vsList.Add( vec[ii] ). } return strList. =true.if( vecTemp[jj] != vec[ii] ) { vecTemp. index++) { string strTemp = vTemp[index]. if(bRemoveRepeat && ( -1 != vsList. ii++) { str += (string) vecTemp[ii]. } /// End MODIFY_VECOTR_TO_STRING . index<vTemp. vTemp = vec.Sort().GetSize(). chSeparator). ='|' { if( NULL == vec || NULL == strList) return -1. }*/ /// End ADD_VECTOR_TO_STRING int vector_to_str_list(const vector &vec.GetSize() > 0 ) vTemp. char chSeparator) //=true.GetSize(). str += '|'. return iSize.ii < iSize . str = "". if( bSort && vTemp. } } //end if if( bSort ) vecTemp.SetTokens(vsList.Find(strTemp)) ) continue. string &strList.Sort(). iSize = vecTemp. bool bSort. for(int index. for(ii = 0. vsList.1. } str += vecTemp[ii]. vector vTemp. break.Add(strTemp).

0382 QA70-8187 GET_DATA_VALUES_DIRECT_FROM_IMAGE /* if( bOfferConversion ) { if( IDYES == MessageBox(NULL. } */ /// end GET_DATA_VALUES_DIRECT_FROM_IMAGE error_report("MatrixObject has no data"). } } /// end XF_MATRIX_OBJECT_CENTRALIZED_DATA_CONVERSION /// ML 1/13/2006 XVARIABLEBASE_TO_VC /* //---. bool bFound = LT_get_str(strName. string& strVal) { char szBuffer[100*MAXLINE]. . 100*MAXLINE). "MatrixObject has no data. return bFound. return false. return false.CPY 10/10/05 CENTRALIZED_XF_ARG_QUOTE_HANDLING // return CCS_LTVAR_SUBSTITUDED if LT str variable and converted return true. MB_YESNO) ) { // this is temp solution until I write proper function in MatrixObject MatrixLayer ml. "Convert image to data". } //---//---.SetViewImage(FALSE) && ml.HasData() ) { /// EJP 2006-03-29 v8.CPY 10/4/05 QA70-7895 STR_VAR_FROM_XF_ALLOW_NO_$ static bool _get_LT_str(const string& strName. szBuffer[0] = '\0'.IsValid() ) { error_report("MatrixObject is invalid").SetViewImage()). } if( !mObj. strVal = szBuffer. BOOL bOfferConversion) { if( !mObj.GetParent(ml). szBuffer.\nWould you like to convert image to data?". return (ml. } return false. mObj./// YuI 08/23/05 XF_MATRIX_OBJECT_CENTRALIZED_DATA_CONVERSION bool check_matrix_has_data(MatrixObject& mObj.

string& str. return CCS_QUOTE_REMOVED. bool bDollarEnded = false.GetLength() > 1 && strVal[0] == '"') // quoted { strVal. if(bFound) { str = strTemp. string strVar = strVal. return CCS_LTVAR_SUBSTITUDED. if('$' == nChar) { strVar = strVal. } bool bFound = false. } int nPos. either because not ended with $ and bCheckLTstrVarOnlyIfDollarEnded=false. } else if(bCheckLTstrVarOnlyIfDollarEnded) { str = strVal. bool bCheckLTstrVarOnlyIfDollarEnded) { if(strVal. or bCheckLTstrVarOnlyIfDollarEnded but not ended with $ and not a LT str variable // int check_cvt_str(const string& strVal. return bGoodCname?CCS_NO_CHANGE_GOOD_C_NAME : CCS_NO_CHANGE. } .TrimLeft('"'). if(is_good_C_identifier(strVar)) { string strTemp. bFound = _get_LT_str(strVal. int nChar = str_end_char(strVal.Left(nPos). } bGoodCname = true. return is_good_C_identifier(str)? CCS_NO_CHANGE_GOOD_C_NAME : CCS_NO_CHANGE. } str = strVal. &nPos). bDollarEnded = true. str = strVal. strTemp). bool bGoodCname = false. if( bDollarEnded ) return CCS_ERR_LTVAR_NOT_FOUND.// return CCS_QUOTE_REMOVED if quoted and quotes removed // return CCS_ERR_LTVAR_NOT_FOUND if $ ended but not found to be LT str variable // return CCS_NO_CHANGE if no change. strVal.TrimRight('"').

/// Cloud 8/30/07 ADD_OPTION_OF_NOISE int nSize = vSignal.end //---. NULL. case NOISE_OVER_DIFFERENCE: double dMin. sd = 1 //vTemp *= ave * level/100. NULL. int type) /// End ADD_OPTION_OF_NOISE { //add gaussian noise with standard deviation vector vTemp. dMax.GetSize(). NULL.Normal(nSize).. &nTemp). /// End ADD_OPTION_OF_NOISE vSignal += vTemp. } /// Cloud 8/30/07 ADD_OPTION_OF_NOISE //ave = dSum/nTemp. switch (type) { case NOISE_OVER_AVERAGE: /// End ADD_OPTION_OF_NOISE vTemp = abs(vSignal). double level) void add_white_noise(vector& vSignal.GetSize(). double base. } vTemp. base = dSum/nTemp. vector& vY. break.//---. vector& vZ) { if(!wks) . NULL. return. dMax). double level.GetMinMax(dMin. if(nTemp < 1) { out_str("no valid data"). double dSum = ocmath_d_sum(vTemp. // mean = 0.. vTemp *= base * level/100.CPY 10/10/05 CENTRALIZED_XF_ARG_QUOTE_HANDLING */ /// end XVARIABLEBASE_TO_VC ///----Frank 11/23/05 ADD_NOISE_AND_SAVE_DATA /// Cloud 8/30/07 ADD_OPTION_OF_NOISE //void add_white_noise(vector& vSignal. } void save_data_2_Wks(Worksheet& wks. int nTemp.dMin. break. vector& vX. vTemp. base = dMax . vSignal.

dsX = vX. } //---. ALL_USER_FOLDER. 0). 2). } } return false. if (vZ == NULL) { wks.CPY 12/12/05 NLF_FUNCTIONS_NEEDED_IN_PCH //----------------------------------------------------------// the following nlf functions are needed in various PCH and // must be compiled into Origin. dsZ = vZ.0654 NEED_USE_THIS_FUNCTION_IN_FITNL //static bool nlf_find_category(LPCSTR lpcszFunc. ORIGIN_PATH_SYSTEM}. string& strCategory. } .} ///----End ADD_NOISE_AND_SAVE_DATA return.SetSize(-1. dsY(wks. Dataset dsX(wks.SetColDesignations("XY"). 0). dsX = vX. string& strCategory.GetSize(). strCategory. int& nFolderLocation) bool nlf_find_category(LPCSTR lpcszFunc. dsY(wks. wks.SetSize(-1. ii++) { nType = vnFolderTypes[ii]. 1). } else { wks. 2). SYS_FOLDER} vector<int> vnFolderTypes = {ORIGIN_PATH_USER. ii < vnFolderTypes. dsY = vY.h //----------------------------------------------------------///Arvin 07/06/07 v8. int& nFolderLocation) ///END NEED_USE_THIS_FUNCTION_IN_FITNL { if(nFolderLocation < 0) { // vector<int> vnFolderTypes = {USER_FOLDER.SetColDesignations("XYZ"). dsY = vY. Dataset dsX(wks. ORIGIN_PATH_GROUP. return true. for(int ii = 0. int nType. 1). if(nlf_find_category(lpcszFunc. dsZ(wks. nType)) { nFolderLocation = vnFolderTypes[ii]. 3). wks.

ii++) { if(0 == vsSections[ii]. vector<string> vsCategorys.GetSectionNames(vsSections). INIFile iniNLSF(strFilename). // really not possible int nNumCate = iniNLSF. .string strFilename = nlf_get_ini_filepath(nFolderLocation). nCateIndex++) { int nNumFunctions = iniNLSF. for(int nFuncIndex=0. } //CPY 1/19/06 if nIniPathAsDefault != ORIGIN_PATH_UNDEF. } if(ii == nNumSection) return false. then we will turn on searching and start from this location static bool nlf_get_fdf_filepath(string& strFilePath. const string& strCategoryName. ii<nNumSection. vsSections[ii]).CompareNoCase("Category")) break.CompareNoCase(lpcszFunc)) { strCategory = vsCategorys[nCateIndex].IsFile()) return false. !bFind && nFuncIndex<nNumFunctions.IsFile()) return false. vsCategorys[nCateIndex]). const string& strFunctionName. nFuncIndex++) { if(0 == vsFunctions[nFuncIndex]. // string strFuncFileName.GetKeyNames(vsFunctions. vector<string> vsFunctions. //search function by function name bool bFind = false. } } } return false. if(!strFilename. int& nFolderLocation. for(int ii=0. return true. int nInILocation. int nNumSection = iniNLSF. for(int nCateIndex=0. INIFile iniNLSF(strINIFilename). !bFind && nCateIndex<nNumCate.GetKeyNames(vsCategorys. vector<string> vsSections. if(!strINIFilename. int nIniPathAsDefault) { ///Jasmine 09/26/07 SEARCH_THE_RIGHT_NLSF_INI /* string strINIFilename = nlf_get_ini_filepath(nInILocation).

Add(nInILocation).fdf".//nInILocation if(!strINIFilename. strFuncFileName). ALL_USER_FOLDER. nFolderLocation = nIniPathAsDefault != ORIGIN_PATH_UNDEF? nIniPathAsDefault: USER_FOLDER. //if(bSearch && !strFilePath. string strGroup = okutil_get_origin_path(ORIGIN_PATH_GROUP.IsFile()) if(bSearch && !bExists) { nFolderLocation = ALL_USER_FOLDER.IsEmpty()) break. STR_FDF_FILE_FOLDER).Format(strFormatFDF. get_origin_path(nFolderLocation. SYS_FOLDER}. strFunctionName).IsEmpty()) vnINIs. } if(strFuncFileName. if(nFolderLocation < 0 || nIniPathAsDefault != ORIGIN_PATH_UNDEF) { bSearch = true. STR_FDF_FILE_EXT). if(!strFuncFileName. strFuncFileName). STR_FDF_FILE_FOLDER. strFunctionName). // strFilePath.Add(ALL_USER_FOLDER). } // string strFormatFDF = "%s%s. ///Jasmine 09/30/07 QA70-10462 CHECK_REPLACE_USER_FITTING_FUNCTION_TO_GROUP_ONE //look function in group folder only when the client's user function is not found vector<int> vnINIs. vnINIs.ReadString(strCategoryName. if(USER_FOLDER == nInILocation && !strGroup.Format(strFormatFDF. */ string strFuncFileName. ii++) { int nINI = vnINIs[ii]. STR_FDF_FILE_FOLDER). // strFilePath.ReadString(strCategoryName. ///End SEARCH_THE_RIGHT_NLSF_INI bool bSearch = false. . //vnINIs. ///End CHECK_REPLACE_USER_FITTING_FUNCTION_TO_GROUP_ONE for(int ii = 0. bool bExists = okutil_make_file_path(strFilePath.IsFile()) continue. INIFile iniNLSF(strINIFilename). string strINIFilename = nlf_get_ini_filepath(nINI). nInILocation).string strFuncFileName = iniNLSF. strFuncFileName.//return false.IsEmpty()) return false. strFuncFileName = iniNLSF.// ={USER_FOLDER. TRUE).GetSize(). get_origin_path(nFolderLocation. ii < vnINIs. nFolderLocation. NULL.InsertAt(0.

else strIniFilePath = okutil_get_origin_path(nIniPath. string strIniFilePath. ""). } //-----.Empty().Format(strFormatFDF.CPY 04/07/08 QA80-11384 FIX_GET_FDF_FULL_PATH_WITHOUT_CATEGORY /* // return the fdf filename without path and without ext //lpcszFuncName = "system:Gauss". get_origin_path(nFolderLocation. } ///End SOME_FUNCTIONS_ARE_NOT_IN_SYSTEM_OR_USER_FILE_FOLDER return false. ""). STR_FDF_FILE_FOLDER. nFolderLocation. // strFilePath. // if( !strFilePath. return true.IsFile()) { strFilePath = strFuncFileName.IsFile() ) if( !bExists ) { nFolderLocation = SYS_FOLDER.IsEmpty() && bAddNLSFini) strIniFilePath+="NLSF. if (nIniPath == ORIGIN_PATH_USER75) get_origin75_user_path(strIniFilePath). strFuncFileName.bExists = okutil_make_file_path(strFilePath. STR_FDF_FILE_EXT). //-----. ///Jasmine 07/25/07 SOME_FUNCTIONS_ARE_NOT_IN_SYSTEM_OR_USER_FILE_FOLDER if(strFuncFileName. this mean system path only //lpcszFuncName = "Gauss". nFolderLocation. return strIniFilePath. STR_FDF_FILE_FOLDER. strFuncFileName).IsFile()) if(!bExists) { strFilePath. } string nlf_get_ini_filepath(int nIniPath. } } // if(!strFilePath. then AllUser then System . strFuncFileName. bExists = okutil_make_file_path(strFilePath. STR_FDF_FILE_EXT).Folger 09/03/07 CORRECTLY_GET_ORIGIN75_PATH //string strIniFilePath = okutil_get_origin_path(nIniPath. bool bAddNLSFini)//=true { //-----.ini". this mean that we need to find from User. } return true.End CORRECTLY_GET_ORIGIN75_PATH if(!strIniFilePath. STR_FDF_FILE_FOLDER).

}*/ bool nlf_get_fdf_filename(LPCSTR lpcszFuncName. if(nPathType >= 0 && nPathINI >= 0) nPathType = nPathINI. // this is caller's repsonsibility if(strCategory. // ini file in a diff location found the more appropriate location for loading function } if(nPathINI < 0) //nPathINI = USER_FOLDER. ASSERT(!strFuncName. strCategory. string* lpstrCategory. string strCategory = lpstrCategory? *lpstrCategory : "".// ini path must be specified string strFullpath. int* pnFolder. &nPathType). nPathINI))// may find the ini that has the category return strEmpty. string* pstrCategory. // indicate unknown string strFuncName = okutil_separate_composite_name(lpcszFuncName. strFuncName. if(!nlf_get_fdf_filepath(strFullpath. true). strCategory.IsEmpty()).// ini path must be specified ///Jasmine 09/04/07 SHOULD_TRY_TO_GET_INI_PATH_FROM_NPATHTYPE ///Jasmine 09/07/07 but when nPathType == SYS_FOLDER. return GetFileName(strFullpath. if(pnFolder) *pnFolder = nPathType. string* pstrFullpathFDFfile) // = NULL =NULL = NULL).string nlf_get_fdf_filename(LPCSTR lpcszFuncName. int nPathINI = ALL_USER_FOLDER == nPathType? nPathType : -1. nPathINI. nPathType. int nPathType = -1.IsEmpty()) { . string* pstrFullpathFDFfile) // = NULL =NULL = NULL).IsEmpty()) { if(!nlf_find_category(strFuncName. should use USER_FOLDER instead nPathINI = (nPathType < 0 || SYS_FOLDER == nPathType)? USER_FOLDER : nPathType. ///Jasmine 10/08/07 QA70-10462 SEEK_SHARED_FDF_IN_SHARED_NLSF_INI //int nPathINI = -1. nPathINI)) return strEmpty. int nPathINI = ALL_USER_FOLDER == nPathType? nPathType : -1. // indicate unknown string strFuncName = okutil_separate_composite_name(lpcszFuncName. if(pstrFullpathFDFfile) *pstrFullpathFDFfile = strFullpath. int nPathType = -1. int* pnFolder. &nPathType). { string strEmpty. if(pstrCategory) strCategory = *pstrCategory. { string strCategory. ///End SEEK_FOR_SHARED_FDF_IN_SHARED_NLSF_INI if(strCategory.

GetSize(). should use USER_FOLDER instead nPathINI = (nPathType < 0 || SYS_FOLDER == nPathType)? USER_FOLDER : nPathType. return true.Sort(SORT_ASCENDING. TRUE. vnIndex). vector vIndex. vector &vGroup.if(!nlf_find_category(strFuncName. if(nPathType >= 0 && nPathINI >= 0) nPathType = nPathINI. strCategory. vector& vNumInGroup) { int nSize = vstrGroup. if(pnFolder) *pnFolder = nPathType.end CPY 04/07/08 QA80-11384 FIX_GET_FDF_FULL_PATH_WITHOUT_CATEGORY ///Echo 1/5/05 STR_GROUP_TO_NUM bool get_group(vector<string> &vstrGroup. if(!nlf_get_fdf_filepath(strFullpath.Data(1.// ini path must be specified ///Jasmine 09/04/07 SHOULD_TRY_TO_GET_INI_PATH_FROM_NPATHTYPE ///Jasmine 09/07/07 but when nPathType == SYS_FOLDER. nSize). . nPathINI))// may find the ini that has the category return BOOL_ERR("NLSF category not found for " + strFuncName). strCategory. if (nSize == 0) return false.Reorder(vnIndex). vGroup[0] = nGroup. vGroup. vstrGroup. } //---.SetSize(nSize). nPathINI. nPathINI)) return BOOL_ERR("NLSF FDF not found " + strFuncName). // ini file in a diff location found the more appropriate location for loading function } if(nPathINI < 0) //nPathINI = USER_FOLDER. nPathType. int nGroup = 1. vector<uint> vnIndex. vIndex. vector<string> &vstrLab.// ini path must be specified string strFullpath. if(pstrFullpathFDFfile) *pstrFullpathFDFfile = strFullpath. vIndex. strFuncName.

Reorder(vnIndex). vNumInGroup[nGroup-1] = vNumInGroup[nGroup-1]+1. return true. ii < nSize-1. vNoDuplic = vDataCpy. vDataCpy = vData. vstrGroup. } vIndex.GetSize().Add(0). } ///Echo 1/5/05 TRIM_DULP_IN_VEC void trim_duplicated_value(vector& vData.Sort(). ii++) { if (vDataCpy[ii] == vDataCpy[ii+1]) vNoDuplic[ii+1] = NANUM. vstrLab[0] = vstrGroup[0].GetSize() <= 0) return. vNumInGroup[0] = 1. vector& vNoDuplic) { ///*** Terminate if error setting if (vData.. ii++) { if(vstrGroup[ii] != vstrGroup[ii+1]) { nGroup++. } vGroup[ii+1] = nGroup. vstrLab.Reorder(vnIndex). } vNoDuplic. vGroup. for (int ii =0.1. vNumInGroup. TRUE.Sort(SORT_ASCENDING.Trim().vstrLab.Add(vstrGroup[ii+1]). int nSize = vDataCpy. for (int ii = 0. vNumInGroup. } ///Arvin 11/16/07 QA70-10676 UPDATE_MEANS_COMPARISION_PLOT_LEGENT_WITH_SIG_INFO . /// *** To avoid the changing of vData vector vDataCpy.SetSize(1). vnIndex). ii < nSize .SetSize(1). vDataCpy.

int* nRowEnd) { if (!dr) return false. c2.GetSize() < 1) { vUniqueValues.Add(vData[index]). nSumCols += nCols. r2. string strRange.Add(index). c1.GetNumCols() . return vUniqueIndices. kk < dr. int* nRowBegin.nIndex > 0.GetRange(kk. vector vUniqueValues. nUniqueStart = 0. ///END HANDLE_WHOLE_WORKSHEET_SELECTED int nCols = c2-c1+1. } } index++. vUniqueValues. vector<int>& vUniqueIndices) { int nDataSize = vData. wksTemp. vUniqueIndices. r1. const int nIndex. if(nFind <= 0 && vIndex. while(index < nDataSize) { vector<uint> vIndex. Column& col. if (wksTemp) { ///Echo 5/20/08 v8.Add(nUniqueStart). kk++) { dr. .GetSize().Add(vData[nUniqueStart]). if(nDataSize < 1) return -1.SetSize(0). r2. int r1. int index = 1.GetSize().Find(vIndex. bool bColInRange = nSumCols . &strRange). c1. vUniqueIndices. int nFind = vUniqueValues.0867 HANDLE_WHOLE_WORKSHEET_SELECTED if (c2 == -1) c2 = wksTemp. Worksheet wksTemp. for (int kk = 0. int nSumCols = 0.GetNumRanges().1. } ///end UPDATE_MEANS_COMPARISION_PLOT_LEGENT_WITH_SIG_INFO bool range_get_col(const DataRange& dr.int find_unique_values(const vector& vData. vUniqueIndices. c2. vData[index]).

const int nIndex) { string strName = "".GetInternalData() } return -1. ii < dr. Worksheet wksTemp. nIndex. } return strName. Column col. if (col) { strName = col. } ///Echo 3/13/06 GET_COL_NAME_FROM_RANGE string range_get_col_name(const DataRange& dr. c2.} } return true.GetName(). } ///Arvin 11/16/06 GET_FIRST_Y_COL_FROM_RANGE int get_first_y_col_index(const DataRange& dr) { int r1. if (nRowBegin) *nRowBegin = r1.GetNumRanges(). } if ( bColInRange ) { col = wksTemp. for( int ii = 0. range_get_col(dr.GetLongName(). if (nRowEnd) *nRowEnd = r2. col). c1. col).Columns(c1 + (nIndex-nSumCols+nCols)). if (col) { return col. } int range_get_col_type(const DataRange& dr. if (!lstrcmp(strName. r2. "")) strName = col. nIndex. const int nIndex) { Column col. break. string strRange. ii++) { . Column col. range_get_col(dr.

dr. wksTemp. ID_BY_PARAMETER. r2. c2. string range_get_col_id(const DataRange& dr.GetRange(0. const int nParamIndex) { int r1. c2. ID_BY_SHORT_NAME. break. if(c1 == c2 && wksTemp. }. col = wksTemp. c2. nParamIndex).GetName(). case ID_BY_SHORT_NAME: strName = col. r2. c1. string strRange. break. &strRange).GetRange(ii. c1. case ID_BY_PARAMETER: col. . &strRange). break. ID_BY_COMMENT. Column col. Worksheet wksTemp.GetLongName(). col = wksTemp. r2. &strRange). c1.Columns(c1). wksTemp. c2. if (1 == dr. const int idRule. c1.Columns(c1) } string strName. r1.Columns(c1 + nIndex) }else { dr.GetNumRanges()) { dr. ID_BY_UNITS. case ID_BY_COMMENT: strName = col. switch(idRule) { case ID_BY_LONGNAME: strName = col. wksTemp. r1. r1. const int nIndex. r2.} ///end GET_FIRST_Y_COL_FROM_RANGE ///Sandy 2007-7-20 REMOVE_NO_USE /* ///Sandy 2006-6-7 GET_COL_ID_WITH_DIFFERENT_ID enum { ID_BY_LONGNAME = 0.GetType() == OKDATAOBJ_DESIGNATION_Y) return ii. } return -1.GetParameter(strName.GetRange(nIndex.GetComments(). break.

ii++) { if (ii != vnIndex[jj]) vstrTmp.GetUnits(). break.case ID_BY_UNITS: strName = col.GetSize()) return false. } */ static void _remove(const vector<uint>& vnIndex. vData. } return true. vector<string> &vsGroup1. vector<uint> vnIndex.GetSize()-1) ? jj+1 : jj. } vstrData = vstrTmp. } bool trim_nanum_1group(vector &vData. for (int ii = 0. int& nMissing) { if (vData.GetSize()) return false. ii < vstrData.Add(vstrData[ii]).GetSize() || vsGroup1.GetSize() != vsGroup2.GetSize() != vstrGroup.GetSize() != vsGroup1. vector<string> &vsGroup2.GetSize(). default: break. . } return strName. vstrGroup). vector<string>& vstrData) { vector<string> vstrTmp. vector<string> &vstrGroup. NANUM). nMissing = vData. } bool trim_nanum_2group(vector &vData. vector<uint> vnIndex. if (nMissing > 0) { _remove(vnIndex. int jj = 0.Find(vnIndex. int& nMissing) { if (vData.Trim(). else jj = (jj < vnIndex.

//} //if(vGroup != NULL) //{ //vGroup.nMissing = vData. if(vstrGroup) { for(int ii=0. . jj = 0.ii++) { mData. nMissing = 0. _remove(vnIndex.GetNumRows().GetSize().RemoveAt(ii . if (nMissing > 0) { _remove(vnIndex. //} //} // //mat. //} static int _trim_matrix(matrix& mData.ii++) //{ //mat. vsGroup2). ///Eric 06/3/22 //int trim_matrix(matrix &mat.GetRow(vTemp. // //return nMissing.Trim(). jj = 0. //int nTemp. vector &vCensor. vData.ii<mat. vTemp))) //{ //nMissing += nTemp.GetNumRows(). //} //jj++. int nTemp.jj). nMissing = 0. } } return true.jj). //if(vCensor != NULL) //{ //vCensor.ii). vector<string> &vGroup) //{ //vector vTemp. //if(0 < (nTemp = ocmath_count(NANUM. // //for(int ii=0.RemoveAt(ii . NANUM).ii). vsGroup1). vTemp.GetRow(vTemp.ii<mData.RemoveEmptyRows(false). vector<string>& vstrGroup) { vector vTemp.Find(vnIndex.

} int trim_nanum_sa(int* nMissing.jj).GetSize() != vCensor. vector &vTime. //} //} // //if(mCovariat != NULL) //{ //if(vTime. vTemp))) { nMissing += nTemp.GetSize(). jj++.if(0 < (nTemp = ocmath_count(NANUM.GetSize() != mCovariat.GetSize() != vstrGroup. nMissing = 0 < ocmath_count(NANUM. } mData. vector &vCensor. //} // //if(vstrGroup != NULL) //{ //if(vTime. } } else { } mData. .Replace(0. matrix &mCovariat.Replace(0. if (bWeibull) vTime. vTemp. MATREPL_TEST_LESSTHAN).RemoveAt(ii .GetSize().GetAsVector(vTemp). int nTime = vTime.RemoveEmptyRows(false). vTemp). NANUM.GetSize()) //{ //return OE_INVALID_SIZE. //} //} ///value in time range that are less than 0 should be excluded vTime. const bool bWeibull) { //if(vTime.GetNumRows()) //{ //return OE_INVALID_SIZE. vstrGroup. vector<string> &vstrGroup.GetSize()) //{ //return OE_INVALID_SIZE.GetSize(). MATREPL_TEST_EQUAL). vTemp. return nMissing. NANUM.

mTemp. ii++). 2 + mCovariat.GetSize().GetSize(). nSize). if(mTemp. nCensor).GetNumRows(). nSize). mTemp.SetSubMatrix(mCovariat. } else if(vTime != NULL) { mTemp. int nSize = min(nTime. if( vTime ) { mTemp. 1).SetColumn(vTime. mTemp. 2).GetColumn(vTime. 1).SetSize(nSize.GetNumCols()).SetColumn(vTime. if(mCovariat) { mTemp. 0). nSize = min(nCovar.SetColumn(vCensor. } if( vCensor ) { . mTemp.SetSize(nSize. 0). if (vstrGroup) { int nGroup = vstrGroup.int nCensor = vCensor. *nMissing += _trim_matrix(mTemp. *nMissing = 0. nSize = min(nGroup. } int ii. 2). } if (mCovariat) { int nCovar = mCovariat. } else { ii = 0. } matrix mTemp. vstrGroup).SetColumn(vCensor. mTemp.GetNumRows() == 0) { return OE_RANGE_ZERO.

GetSubMatrix(mCovariat. int nLevel = 1.Add(lpsczLabel). } } vector vSubGroup(nSize). const int nSize. ii++) { if (0 == lstrcmp(lpsczLabel. }else if (ii == nGroup-1) { vstrGroup.GetColumn(vCensor. int nGroup = vstrGroup.Add(lpsczLabel).GetSize()) //return false. vector<string>& vstrGroup. vector<string> &vstrGroup1.GetSize() != vstrGroup. vector<string> &vstrGroup2) { . vector& vData. ///Echo 4/19/06 static bool _anova2_update_group(vector& vGroup. } } } return OE_NOERROR. ii++). } for (int ii=0. ii<nGroup. if( mCovariat ) { mTemp. vector& vGroup1.GetSize(). LPCSTR lpsczLabel) { //if (vNumInGroup.} mTemp. } return true. nLevel = nGroup+1.Append(vSubGroup). int get_anova2_data(const Range& rInGroup. vSubGroup = nLevel. vector& vGroup2. break. vGroup. if (nGroup == 0) { vstrGroup. ii). vstrGroup[ii])) { nLevel = ii+1.

NULL. &vstrFactor. vector& vData. vstrGroup2. if (!_anova2_update_group(vGroup1. ii++ ) { vector vTmpData. nSize. ii++ ) { vector vTmpData. vData. if (vstrLab) vstrLab. if (vstrFactor.Append(vTmpData). for ( int ii=0. ii.DWORD dwRules = DRR_NO_WEIGHTS. NULL. NULL.Add(nSize). } if ( vData. &vTmpData. if ( nSize == 0) break. vstrFactor[0]) || !_anova2_update_group(vGroup2. ii< nNumData. vNumInGroup.GetNumData(dwRules).GetSize() != 2) return CER_NOT_INDEX_DATA. int nSize = vTmpData. vstrGroup1.GetSize() == 0 ) return CER_NO_DATA. &vW). vW.Add(vstrFactor[0]). vector<string> &vstrLab) { DWORD dwRules = DRR_NO_WEIGHTS. int nSize = vTmpData. &vW). NULL. &vstrFactor. rInGroup. NULL. NULL. if (1 > nNumData) return CER_NOT_INDEX_DATA. rInGroup. vstrFactor[1])) return CER_NOT_INDEX_DATA } if ( vData.GetSize(). int nNumData = rInGroup. int nNumData = rInGroup. NULL. vData. } int get_data_in_group(const Range& rInGroup.GetSize().GetNumData(dwRules).GetData(dwRules.Append(vTmpData).GetData(dwRules. if (1 > nNumData) return CER_NOT_INDEX_DATA. vector<string> vstrFactor.GetSize() == 0 ) return CER_NO_DATA. ii< nNumData. ii. nSize. NULL. if ( nSize == 0) continue. . for ( int ii=0. &vTmpData. vector<string> vstrFactor. vector& vNumInGroup. vW.

return CER_NO_ERROR.GetData(vData1. vector& vData1. &vW).Add(vstrFactor[0]). . vector& vData1. &vData2. NULL. int& nMissing. if (1 > nRange || nRange > 2) return CER_NOT_RAW_DATA. NULL. int nSize2 = vData2. 1. return CER_NO_ERROR. if (2 != nNumData) return CER_GROUP_NUM_NOT_TWO. if (vstrLab) vstrLab. vector<string> vstrFactor. vector vData. 0.GetSize() == 0 || vData2. rInGroup.GetNumData(dwRules). NULL. &vstrFactor. rInVar. &dwPlotID. 0. NULL.GetData(dwRules. rInVar. 0).GetNumRanges(). int nNumData = rInGroup. NULL. } int get_2_data_in_group(const Range& rInGroup. 1. //rInVar.GetData(dwRules. NULL. int& nMissing. &vstrFactor. vector& vData2. 1). DWORD dwRules = DRR_NO_WEIGHTS | DRR_NO_FACTORS . NULL. //rInVar. &dwPlotID.Add(vstrFactor[0]). int nSize1 = vData1.GetData(vData2. const bool bPair) { int nRange = rInVar.GetSize(). NULL.if (2 > nNumData) return CER_GROUP_NUM_LS_2. vector<string>& vstrLab. const bool bPair) { DWORD dwRules = DRR_NO_WEIGHTS. &vData1). &vData2). } int get_2_data_in_var(const Range& rInVar. NULL.GetData(dwRules. rInGroup. if (vData1.GetSize(). vector& vData2. DWORD dwPlotID. if (vstrLab) vstrLab.GetSize() == 0) return CER_NO_DATA.GetData(dwRules. vW. vector<string>& vstrLab. &vData1. &vW). NULL. if (1 > nNumData) return CER_NOT_INDEX_DATA.

SetColumn( vData1.GetColumn( vData1. 1). ii < 2. mData. } return CER_NO_ERROR. nSize1. ii).RemoveEmptyRows(false). mData.SetSize(2). &str)) . 0). if (!bPair) { if (0 < nMissing) { vData1. } }else { matrix mData(min(nSize1. mData.Trim().if (nSize1 == 0 || nSize2 == 0) return CER_NO_DATA. nMissing = ocmath_count(NANUM. mData. nSize2). mData.2).SetColumn( vData2. } if (vstrLab != NULL) { vstrLab.GetColumn( vData2. ERROR_TITLE. } bool error_msg_box(LPCSTR lpcszErrMsg) { MessageBox(NULL. string &str) { if(!ocu_load_err_msg_str(nErrCode. 1). lpcszErrMsg. 0). MB_OK | MB_ICONSTOP). vData2). vData2. if ( bPair && nSize1 != nSize2) return CER_DATA_DIFF_SIZE.Trim(). return false. ii++) vstrLab[ii] = range_get_col_name(rInVar. } ///---Sim 08-22-2006 ERR_MSG_SUPPORT_PARAM static bool _load_error_msg_str(int nErrCode. nSize2. vData1) + ocmath_count(NANUM. for (int ii = 0.

///---Sim 08-22-2006 ERR_MSG_SUPPORT_PARAM /* if(!ocu_load_err_msg_str(nErrCode. ///---Sim 08-22-2006 ERR_MSG_SUPPORT_PARAM bool error_msg_box(int nErrCode. &str)) str. int nParam) { string str. str. str).Format("Error code %d not defined. please report this problem. str) ) { string strTemp = str. if ( _load_error_msg_str(nErrCode. LPCSTR lpcszParam) { string str. str.". nParam). LPCSTR lpcszParam) { string str.{ } ///---END ERR_MSG_SUPPORT_PARAM bool error_msg_box(int nErrCode) { string str. */ _load_error_msg_str(nErrCode. please report this problem. nErrCode).Format(strTemp. ///---END ERR_MSG_SUPPORT_PARAM } return error_msg_box(str).". } bool error_msg_box(int nErrCode. } return error_msg_box(str). str) ) { return error_msg_box(str). return false.Format(strTemp. . str) ) { string strTemp = str. int nParam. } return true. nErrCode). if ( _load_error_msg_str(nErrCode. } } bool error_msg_box(int nErrCode. if ( _load_error_msg_str(nErrCode.Format("Error code %d not defined. lpcszParam). str.

nParam). str.Format(strTemp.Format(_FORMAT_. bool bIcon) int show_warning_msg(int nErrCode. //Setting message parameter return error_msg_box(str). _Param) \ int nIndex = strErrMsg. if ( nErrCode < 0 ) { if ( NULL != lpcszErrMsg ) strErrMsg = lpcszErrMsg. UINT uType) // = 'W'. LPCSTR lpcszParam. int nParam. \ } ///---Sim 06-19-2007 IMPROVE_WARNING_MESSAGE_POST_FUNCTION //int show_warning_msg(int nErrCode. } } ///---END ERR_WARNING_MSG_SUPPORT_PARAM ///---Sim 08-23-2006 WARNING_MSG_BOX #define SETTING_WARN_STR_PARAM(_FORMAT_. str). int nParam. nParam. MB_OK { //Load message string strErrMsg = "". \ strErrMsg. str) ) { string strTemp = str. _Param).string strTemp = str.Format(strTemp. char cMsgType. lpcszParam). LPCSTR lpcszParam. strErrMsg). } else _load_error_msg_str(nErrCode. //else //strErrMsg = "Warning message string is NULL. \ if ( nIndex >= 0 ) \ {\ string str. bool bShowBox.Delete(nIndex.Find(_FORMAT_). bool bCancelButton. \ str. .". if ( _load_error_msg_str(nErrCode. } return error_msg_box(str). LPCSTR lpcszParam. LPCSTR lpcszErrMsg. LPCSTR lpcszErrMsg.Insert(nIndex. int nParam) { string str. bool bOKButton. lpcszParam. str. strlen(_FORMAT_)). } bool error_msg_box(int nErrCode. \ strErrMsg.

} int warning_msg_box(int nErrCode. lpcszErrMsg. bShowBox. bIcon). TRUE { return show_warning_msg(-1. nParam. bIcon). // Not defined return value yet. okoc_out_msg(strErrMsg. bool bShowBox. \ } } /* int warning_msg_box(LPCSTR lpcszErrMsg. } int warning_msg_box(LPCSTR lpcszErrMsg. bool bIcon)// = FALSE . bCancelButton. TRUE { return show_warning_msg(-1. bool bIcon)// = FALSE . } if ( NULL != lpcszParam ) { SETTING_WARN_STR_PARAM("%s". uType). NULL. bCancelButton. _L(ERROR_TITLE). cMsgType).if ( WARN_NPARAM_UNUSED != nParam ) { SETTING_WARN_STR_PARAM("%d". bool bShowBox. bool bCancelButton. int nParam. //out_str(strErrMsg). strErrMsg. bool bCancelButton. NULL. ERROR_TITLE. TRUE { . /// ML 7/31/2007 QA70-10137 MUST_SUPPLY_PARENT_FOR_MESSAGEBOX //return MessageBox(NULL. bool bIcon)// = FALSE . lpcszParam). strErrMsg. WARN_NPARAM_UNUSED. } //Open message box //if ( bOKButton || bCancelButton) if ( bShowBox ) { //UINT uType = ( bOKButton ? MB_OK : 0 ) | ( bCancelButton ? MB_OKCANCEL : 0 ) | ( bIcon ? MB_ICONSTOP : 0 ) . bool bCancelButton. return MessageBox(GetWindow(). bShowBox. return 0. uType). bool bShowBox. nParam). lpcszErrMsg. /// end MUST_SUPPLY_PARENT_FOR_MESSAGEBOX } else { // no dialog //LT_execute("ty -aa").

bool bIcon)// = FALSE . bool bIcon)// = FALSE . } int warning_msg_box(LPCSTR lpcszErrMsg. MB_OK { return show_warning_msg(nErrCode. nParam. NULL. uType). UINT uType) // = 'W'. char cMsgType. MB_OK { return show_warning_msg(-1. bool bCancelButton. bool bCancelButton. bool bIcon)// = FALSE . LPCSTR lpcszParam. bIcon). bIcon). uType). lpcszErrMsg. NULL. lpcszParam. bShowBox. WARN_NPARAM_UNUSED. bCancelButton. bCancelButton. bIcon). WARN_NPARAM_UNUSED. bool bShowBox. nParam. WARN_NPARAM_UNUSED. lpcszParam. cMsgType. int nParam. MB_OK { return show_warning_msg(-1. bIcon). } int warning_msg_box(int nErrCode. char cMsgType. NULL. bShowBox. } */ int warning_msg_box(LPCSTR lpcszErrMsg. lpcszParam. bShowBox. bool bShowBox. int nParam. bool bShowBox. lpcszErrMsg. NULL. NULL. TRUE { return show_warning_msg(nErrCode. TRUE { return show_warning_msg(nErrCode. TRUE { return show_warning_msg(nErrCode. bool bCancelButton. bCancelButton. bShowBox. WARN_NPARAM_UNUSED. bool bShowBox. NULL. bCancelButton. bShowBox. char cMsgType. UINT uType) // = 'W'. bool bCancelButton. } . cMsgType. bool bShowBox. nParam. } int warning_msg_box(int nErrCode.return show_warning_msg(nErrCode. } int warning_msg_box(int nErrCode. bShowBox. uType). } int warning_msg_box(int nErrCode. bool bShowBox. int nParam. bCancelButton. TRUE { return show_warning_msg(nErrCode. NULL. NULL. bShowBox. bIcon). int nParam. cMsgType. NULL. NULL. LPCSTR lpcszParam. bShowBox. NULL. bool bShowBox. nParam. LPCSTR lpcszParam. } int warning_msg_box(int nErrCode. bool bIcon)// = FALSE . UINT uType) // = 'W'.

NULL. bShowBox.RemoveAll(). char cMsgType. UINT uType) // = 'W'. bShowBox. } int warning_msg_box(int nErrCode. bool bShowBox. nParam. UINT uType) // = 'W'. cMsgType. uType). int nParam. nData = nMin. WARN_NPARAM_UNUSED. LPCSTR lpcszParam. int nInc) { if(nInc <= 0 || nMax < nMin) return false. for(int index = 0. lpcszParam. MB_OK { return show_warning_msg(nErrCode. NULL. char cMsgType. uType). cMsgType. uType). bShowBox. int nParam.Add((string)nData). uType). int nMax. bool bShowBox.int warning_msg_box(int nErrCode. LPCSTR lpcszParam. NULL. bool bShowBox. UINT uType) // = 'W'. } int warning_msg_box(int nErrCode. } int warning_msg_box(int nErrCode. MB_OK { return show_warning_msg(nErrCode. NULL. char cMsgType.IsValid()) . index++. nData <= nMax. cMsgType. vsList. bool bShowBox. bShowBox. MB_OK { return show_warning_msg(nErrCode. nData+=nInc) { vsList. lpcszParam. LPCSTR lpcszParam. int nParam. } ///---END IMPROVE_WARNING_MESSAGE_POST_FUNCTION ///---END WARNING_MSG_BOX /// Iris 8/16/06 NEED_SPECIAL_ROW_COL_HEADER_FOR_MATRIX_TABLE bool str_data_list(vector<string>& vsList. NULL. char cMsgType. nParam. lpcszParam. } ///end NEED_SPECIAL_ROW_COL_HEADER_FOR_MATRIX_TABLE ///Arvin 9/01/06 ADD_WKS_IS_ALL_COLS_DOUBLE bool wks_is_all_cols_double(const Worksheet& wks) { if(!wks. } return true. cMsgType. UINT uType) // = 'W'. MB_OK { return show_warning_msg(nErrCode. int nMin. nParam.

if(internalType == FSI_TEXT) return true. if(internalType != FSI_DOUBLE) return false. } ///end ADD_WKS_IS_ALL_COLS_DOUBLE ///Arvin 9/28/06 ADD_WKS_HAS_TEXT_COL bool wks_has_text_col(const Worksheet& wks) { if(!wks.Columns) { int internalType = col. foreach(Column col in wks.GetInternalData().Columns) { int internalType = col. if((nIndex = strDescription.GetInternalData(). int nIndex. .IsValid()) return false. } ///end ADD_WKS_HAS_TEXT_COL ///Arvin 9/28/06 ADD_WKS_HAS_COMPLEX_COL bool wks_has_complex_col(const Worksheet& wks) { if(!wks. return. } ///end ADD_WKS_HAS_COMPLEX_COL ///Joseph 10/9/06 SEPERATE_DESCIPTION_OR_COMBINE_DESCRIPTION_SECTION void xf_seperate_description(LPCSTR lpcszDesp.return false. string& strMenu. if(internalType == FSI_COMPLEX) return true.Columns) { int internalType = col. } return false. foreach(Column col in wks. } return true. string& strBriefDesp) { string strDescription(lpcszDesp). } return false.IsValid()) return false. foreach(Column col in wks.GetInternalData().Find(":"))<0) { strBriefDesp = strDescription.

vector<int>& vColLower.1). ii < nSize. break. ii++) { if(nIndex <= vUpper[ii]) { nPos += nIndex . ii < nSize. string& strDescriptoin) { string strMenu(lpcszMenu). vector<int>& vColUpper) { if(nCol < 0) return false.GetLength() . int nIndex. nPos += vUpper[ii] . } ///End SEPERATE_DESCIPTION_OR_COMBINE_DESCRIPTION_SECTION ///---Sim 12-05-2007 REWRITE_WKS_DATA_RANGE_TO_MAT /* ///Cheney 2006-10-11 GET_DATA_FROM_DR_TO_MAT static void _get_right_pos_to_set_sub_mat(int& nPos. vector<int>& vLower. string strBrief(lpcszBriefDesp). ii++) { nBounds += vUpper[ii] .vLower[ii] + 1. .vLower[ii] + 1. } } */ ///---END REWRITE_WKS_DATA_RANGE_TO_MAT bool is_column_in_bounds(int nCol. strDescriptoin = strMenu + ":" + strBrief. LPCSTR lpcszBriefDesp. for(int ii = 0.} strMenu = strDescription.Right(strDescription.nIndex . vector<int>& vUpper) { int nSize = vLower. } void xf_combine_description_section(LPCSTR lpcszMenu. } } } static void _get_matrix_bound(int& nBounds.vLower[ii].GetSize().GetSize().Left(nIndex). strBriefDesp = strDescription. for(int ii = 0. vector<int>& vLower. vector<int>& vUpper) { int nSize = vLower.

vr2. r2Temp. if(!dr. //int nRanges = wks. c2. c2. ///Cheney 2006-11-6 USE_DR_GETRANGE_BUT_NOT_WKS. . vrUpper.Add(r1Temp). avoid to make a large matrix //vector<int> vr1. } return false. int nRanges = dr. wks)) return false.Add(r2Temp).GetRange(0. } ///end USE_DR_GETRANGE_BUT_NOT_WKS vector<int> vrLower. r2. r1. c2 = wks. c1. int r1. vc1.GetNumRanges(). vc1. //get source worksheet Worksheet wks. c1. if(nData < 0) return false. vc2). vc2.GetSize(). ii < nSize. ////get bounding. c2). ii < nRanges. because when user changed the selection in worksheet //with interactive button.GetNumData(). c2Temp. ii++) { if(nCol >= vColLower[ii] && nCol <= vColUpper[ii]) return true. vr1.GetSelectedRange(vr1. ii++) { int r1Temp. r2. for(int ii = 0. if(!dr. c1Temp. c1.1. c1Temp. vc1. wks.int nSize = vColLower.GetNumCols() . GetSelectedRange will change. for(int ii = 0. } ///---Sim 12-05-2007 REWRITE_WKS_DATA_RANGE_TO_MAT /* bool get_data_from_dr_to_mat(const DataRange& dr. vr2.GetRange(r1. vc2.Add(c2Temp). r2Temp. vc2.Add(c1Temp). vector<int> vr1. wks)) return false. vc1. r1Temp. r2. c2Temp. vr2. vr2.GetRange(ii. so it is not reliable. //get wks' row and col number c1 = 0. matrixbase& mat) { int nData = dr.

ii++)///Echo 7/31/07 QA70-8679 P11 R2M_AUTO_UPDATE_NOT_SUPPORT_SELETED_RANGE for(ii = 0. vcUpper. nSubMatColBegin. vcUpper). r2)) //return false. } */ bool get_data_from_dr_to_mat(const DataRange& dr. } return true. vrLower. c2. ii++) { if(!dr.GetData(mTemp. . bool bRemoveMissingRows. r1. if(nData < 0) return false. c2.CopyFromWks(wks. vcLower. c1. _get_matrix_bound(nMatCols. vrUpper). bool bRemoveMissingCols)//= true. but vcLower's size may be not equal to vrLower's int nMatRows = 0. mat. c1. true { int nData = dr. 0. c1. ii). if(!get_select_range_bounds(vc1. vector<int> vcLower. r1. dr. vrUpper)) return false. _get_right_pos_to_set_sub_mat(nSubMatRowBegin. //set matrix data //for(ii = 0. r1. wks)) return false. _get_right_pos_to_set_sub_mat(nSubMatColBegin. vcUpper). vc2. vrLower. ii < nRanges.SetSize(nMatRows. r2. nMatCols = 0.GetNumData().if(!get_select_range_bounds(vr1. vcLower. mat = NANUM. ii < nData. //if(!mTemp. c2. ///END R2M_AUTO_UPDATE_NOT_SUPPORT_SELETED_RANGE //get right position to set sub matrix int nSubMatColBegin = 0. vrUpper). //notes:vcLower's size should equal to vcUpper's.GetRange(ii. ///Echo 7/31/07 QA70-8679 P11 R2M_AUTO_UPDATE_NOT_SUPPORT_SELETED_RANGE matrix mTemp. nSubMatRowBegin). mat.SetSubMatrix(mTemp. vcLower. matrixbase& mat. nMatCols). vrLower. _get_matrix_bound(nMatRows. r2. nSubMatRowBegin = 0. vr2. vcUpper)) return false.

1 || uIDs[ii] != uIDs[ii+1] ) // next worksheet is different { if(!dr. } // calculate total size of all sub ranges int nMatRows = 0. vnIndeces)) return false. but dr will not. c1. ///end USE_WRONG_FUNCTION_TO_GET_DATA_IN_STATS_ON_ROWS } // uIDs will be sorted. r2. ///---Sim 12-20-2007 QA80-10842 FIX_NOT_USE_ALL_ROWS //if ( wks.GetNumRanges(). ///---Sim 01-17-2007 QA80-10952 MULTI_SHEET_RANGE /* // sort sub ranges by worksheet vector<uint> uIDs. c1. vnIndeces is such index vector for get unsorted sub ranges info. vector<uint> vnIndeces. vnIndeces. ///---END FIX_INVOKE_WKS_GET_BOUND int ii.GetSize().Add(0). wks)) // only wks used return false. ii++ ) { if ( ii == uIDs. will failed to sort. r2. c2. nMatCols = 0.GetUID()). c1. nBoundR2. ///---Sim 12-20-2007 QA80-10842 FIX_NOT_USE_ALL_ROWS int nBoundR1. //int r1. ///---Sim 01-17-2007 QA80-10952 MULTI_SHEET_RANGE remove ///---Sim 04-28-2008 FIX_INVOKE_WKS_GET_BOUND //int nBoundR1. ii < dr. c2.GetSize() > 1 ) { if(!uIDs.GetRange(vnIndeces[ii]. ii++ ) { if(!dr. for ( ii = 0. r1. ///Arvin 12/13/07 USE_WRONG_FUNCTION_TO_GET_DATA_IN_STATS_ON_ROWS as Echo said //need check wks valid first //uIDs.GetSize() .GetUID()).GetRange(ii. wks)) // only wks used return false. r1. true.Worksheet wks. nBoundR2. } else { // size is 1. r2. if(wks) uIDs. for ( ii = 0.Sort(SORT_ASCENDING.GetNumRows() > nMatRows ) . nBoundC1. nBoundC2.Add(wks. c2.Add(wks. if ( uIDs. ii < uIDs.

wks) ) { if ( wks ) { ///---Sim 04-28-2008 FIX_INVOKE_WKS_GET_BOUND //if ( wks.GetSize().GetNumCols(). SheetRangesList sdl(dr). } */ int nWksIndex. nBoundC2) ) { if ( nBoundR2 + 1 > nMatRows ) nMatRows = nBoundR2 + 1. // fill data of sub range to matrix int nColOffset = 0. for ( ii = 0. // multi sheet data would be put horizontally by cols int nSubMatRowBegin = r1. int nSubMatColBegin = c1 + nColOffset. nMatCols). nBoundR2.GetRange(nWksIndex. mat. nSubMatRowBegin). mat = NANUM. int nWksSubRangeIndex. c2. if ( wks. nBoundC2) ) . nBoundR2. nWksIndex < sdl. 0.SetSubMatrix(matSub.1 || uIDs[ii] != uIDs[ii+1] ) // next worksheet is different nColOffset += wks.GetNumSheets(). nBoundC1. if ( ii == uIDs. // calculate total size of all sub ranges int nMatRows = 0.GetBounds(nBoundR1.GetNumCols(). ii < uIDs.GetRange(vnIndeces[ii]. nMatCols = 0. ii++ ) { if(!dr.GetSize() . nSubMatColBegin. nBoundC1. nWksIndex++ ) { if ( sdl.SetSize(nMatRows. dr.GetData(matSub. matrix matSub. for ( nWksIndex = 0. r2. c1.//nMatRows = wks. r1. vnIndeces[ii]). } ///---END QA80-10842 FIX_NOT_USE_ALL_ROWS } } mat. nMatCols += wks. wks)) return false.GetNumRows().GetBounds(nBoundR1.

} nMatCols += wks. wks. // temp variable. 0. matrix matSub. wks) ) { if ( wks ) { Datasheet ds. nWksSubRangeIndex)).GetSubRangeIndex(nWksIndex.r1. not used ORANGE rg.SetSubMatrix(matSub. nBoundR2. nSubMatRowBegin). nMatCols). // multi sheet data int nSubMatRowBegin = rg.} } } if ( wks.GetBounds(nBoundR1. } } ///---END QA80-10952 MULTI_SHEET_RANGE // remove missing rows if ( bRemoveMissingRows ) . &rg) ) return false.SetSize(nMatRows.GetNumSubRange(nWksIndex). nWksIndex < sdl.GetNumSheets().GetRange(nWksIndex. for ( nWksIndex = 0.GetNumCols().GetNumCols(). for ( nWksSubRangeIndex = 0. would be put horizontally by cols int nSubMatColBegin = rg. // fill data of sub range to matrix int nColOffset = 0. mat = NANUM. nSubMatColBegin. } } nColOffset += wks. dr. mat. nWksSubRangeIndex++ ) { if( !sdl. nWksSubRangeIndex < sdl.GetRange(nWksIndex. nWksSubRangeIndex.GetNumCols()-1) ) ///---END FIX_INVOKE_WKS_GET_BOUND { if ( nBoundR2 + 1 > nMatRows ) nMatRows = nBoundR2 + 1.c1 + nColOffset. sdl. 0. nWksIndex++ ) { if ( sdl. mat. ds.GetData(matSub.

Add(v1[0]). vector<int>& vUpper) { int nSize = v1. vUpper. if(!v1. if(v1[ii] > vUpper[nIndex] + 1) { vLower.Add(v1[ii]). ii++) v2[ii] = v2[ii] == -1 ? nBound : v2[ii]. for(ii = 1. from 1st to last. } vLower. ii < nSize. return true. vector<int>& vLower. . if(nSize != v2.Reorder(vnIndeces)) return false.mat. vUpper. if(nSize > 1) { vector<uint> vnIndeces.Transpose().Sort(SORT_ASCENDING. if(!v2. true. vector<int>& v2. if(nSize == 0) // no range selected return false. ii++) { if(vLower. mat.Add(v2[0]). break. mat.RemoveEmptyRows().Add(v2[ii]). // remove missing cols if ( bRemoveMissingCols ) { mat. int nIndex = 0.RemoveEmptyRows(). for(int ii = 0. } } ///---END REWRITE_WKS_DATA_RANGE_TO_MAT bool get_select_range_bounds(vector<int>& v1. uint nBound. vnIndeces)) return false.GetSize()) return false.Transpose().GetSize(). ii < nSize.GetSize() == 1 && vLower[0] == 0 && vUpper[0] == nBound) // no need to loop.

lpcszAttribVal). } ///end GET_DATA_FROM_DR_TO_MAT /// YuI 12/07/04 v7. } if(v1[ii] == vUpper[nIndex] + 1) { vUpper[nIndex] = v2[ii]. strTag. continue.c int make_one_set_ID(int idBase. strTag. } /// end PICTURE_IN_REPORT_IMPROVEMENT TreeNode check_add_enumerated_node(TreeNode &trParent. lpcszPrefix. int nId = nDataIndex << 16. } } return true.Format("%s%d". index). nId |= IDST_MASK_ONE_SET.nIndex++.5172 PICTURE_IN_REPORT_IMPROVEMENT // this function will not create node if it is not there TreeNode get_enumerated_node(TreeNode &trParent. LPCSTR lpcszAttribVal)//=-1. index). int nID. return trParent.NULL { string strTag. LPCTSTR lpcszPrefix. TreeNode tr = tree_check_get_node(trParent. int index. int nDataIndex) { //++nDataIndex. strTag. } if(v1[ii] <= vUpper[nIndex]) { if(v2[ii] > vUpper[nIndex]) vUpper[nIndex] = v2[ii]. idBase &= 0xffff. lpcszPrefix. continue. return tr.Iris 10/17/06 moved from wksOperatio. LPCTSTR lpcszPrefix. int index) { string strTag. lpcszAttrib.GetNode(strTag). nId |= idBase. } /// end REPORT_TABLE_MULTIHEADERS_MORE_WORK //--. nID.Format("%s%d". LPCSTR lpcszAttrib. return nId. .NULL.

75. 50.} //--///Arvin 10/17/06 ADD_IS_PERCENTILE_VALUE // use dPercent = 0 and provide pPercentiles to get list int is_percentile_value(double dPercent.GetSize(). 10. if(nPercentiles != nPercents) return -1. for( int ii = 0. 25.GetSize() && nPerc < 0) { vPercentiles. vector& vPercentiles) { int nPercents = vPercents. int nPercentiles = vPercentiles. 90. vector* pPercentiles) // =NULL { static vector l_vPercents = {1. 99}. 5.RemoveAt(ii). for(int ii = nPercents-1. } ///end USER_SETTED_PERCENTILES_SHOULD_NOT_EFFECT_HISTOGRAM_AND_BOX_CHART_GRAPHS /// Iris 11/13/06 DO_TWO_TIMES_SEP_REPORT_IN_NLFIT_AND_OPBASE return -1. if(ii < vPercentiles. ii++ ) { if( dPercent == l_vPercents[ii] ) { return ii. ii > -1. nTrimedPerc++. } } return nTrimedPerc. if(pPercentiles) *pPercentiles = l_vPercents. 95. ii < l_vPercents.GetSize(). int nTrimedPerc = 0. . ii--) { int nPerc = is_percentile_value(vPercents[ii]).GetSize(). } } } ///end ADD_IS_PERCENTILE_VALUE ///Arvin 10/19/07 USER_SETTED_PERCENTILES_SHOULD_NOT_EFFECT_HISTOGRAM_AND_BOX_CHART_GRAPHS as max said int trim_custom_percentiles(const vector vPercents.

and //then use the new created data range to plot graph. ii++) { vsDest. vr. vector vr. But. when we use it //plot graph of a data range. } return true. GraphLayer& gl.Add(vs[ii]).1. //we judge column's real type first. int nPlotType. int nEnd) { int nSize = vs. ///Cheney 2007-9-7 SHOULD_ALLOW_DUP_DATASET_PLOT_FOR_PREVIEW_SO_AS_TO_SAME_WITH_FIT_THINGS //int plotDataRange(DataRange& dr. so in this function. vector<string>& vsDest. if ( !vValue. } ///end DO_TWO_TIMES_SEP_REPORT_IN_NLFIT_AND_OPBASE ///---Sim 12-08-2006 GET_LARGEST_GAP int find_largest_gap(const vector& vValue) { int nSize = vValue. vsDest. if(-1 == nEnd) nEnd = nSize . vnIndeces). else if ( 2 == nSize ) return 0. for(int ii=nStart. see #7026 bool get_sub_string_vector(const vector<string>& vs. if(nStart >= nSize || nEnd >= nSize) return false.Sort(SORT_DESCENDING.GetSize(). } ///---END GET_LARGEST_GAP #ifdef NLFTI_PARAMETERS_MANAGER //cpy 9/12/08 added this ///Arvin 12/29/06 SET_W_AS_ED_OF_DATARANGE_AND_PLOT_ERROR_BAR //The function AddPlot determines column type by tag name of corresponding subrange. and create a new data range named by column's real type. if ( 2 > nSize ) return -1.GetSize(). uint nCntrl) .RemoveAll(). vector<uint> vnIndeces. int nStart. ii<=nEnd.//GetSubVector not support get sub string vector now.Difference(vr) ) return -1. return vnIndeces[0]. sometimes the name is no real name of column. FALSE.

nRange < dr. bool bIsWeight = 0 == lstrcmpi(strRangeName. uint nCntrl. wksTemp).GetRange(nRange. c2. c1. int nPlotType. wksTemp. wksTemp.Folger 03/18/08 QA80-11260 FIX_RUNTIME_ERROR_IN_MATRIX_FITTING_PREVIEW_AND_REPORT_GRAPH //if(c1 >= 0 && c1 <= c2) if(wksTemp && c1 >= 0 && c1 <= c2) //-----{ if(bIsWeight) { Column col(wksTemp. . c2. r2. r1. "W")? true : false. ///end SHOULD_NOT_PLOT_WEIGHT_UNLESS_COL_TYPE_IS_ERR string strRange = strRangeName. c2). c1. //Range is not empty //-----. dr. GraphLayer& gl. c1.int plotDataRange(DataRange& dr. r1. change range name from "W" to "ED" and then plot graph with error bar if( col && col. c1). r2.GetType() == OKDATAOBJ_DESIGNATION_ERROR) strRange = _L("ED"). &strRangeName). r1. string strRangeName.GetNumRanges(). r2. } } drSubRange. //If column is error.Add(strRange. ///Sophy 9/2/2008 CLEAN_NLFCURVE_ADDPLOT_CODE /* int nStartPlot = -1. c2. DataRange drSubRange.GetRange(nRange. bool bCheckDup) ///end SHOULD_ALLOW_DUP_DATASET_PLOT_FOR_PREVIEW_SO_AS_TO_SAME_WITH_FIT_THINGS { if(!gl || !dr) return -1. r2.Folger 03/18/08 QA80-11260 FIX_RUNTIME_ERROR_IN_MATRIX_FITTING_PREVIEW_AND_REPORT_GRAPH if ( !drSubRange ) drSubRange = dr. /// Iris 02/13/2007 v8. nRange++) { int c1.0560 SHOULD_NOT_PLOT_WEIGHT_UNLESS_COL_TYPE_IS_ERR //dr. for(int nRange=0. r1. Worksheet wksTemp. } */ // No need to change subrange string now ///end CLEAN_NLFCURVE_ADDPLOT_CODE //-----.

Worksheet wksTemp.GetSize() >0 ) ///end SHOULD_ALLOW_DUP_DATASET_PLOT_FOR_PREVIEW_SO_AS_TO_SAME_WITH_FIT_THINGS nPlot = vnPlotIndices[0].//-----///Arvin 08/29/07 QA70-10073-P6 KEEP_CUSTOMIZATION_SETTINGS_FOR_SOURCE_DATA_PLOT_AFTER_RECALCULATE //int nPlot = gl. vnPlotIndices) > 0 && vnPlotIndices. ///Cheney 2007-9-7 SHOULD_ALLOW_DUP_DATASET_PLOT_FOR_PREVIEW_SO_AS_TO_SAME_WITH_FIT_THINGS //if( check_has_plotted_in_graph(drSubRange.AddPlot(drSubRange. else nPlot = gl. wksTemp). c1. nCntrl). c2. DataRange drSubRange. "W")? true : false. &strRangeName).GetRange(nRange. int nPlotType. r2. nCntrl).GetSize() >0 ) if( bCheckDup && check_has_plotted_in_graph(drSubRange. wksTemp.Folger 03/18/08 QA80-11260 FIX_RUNTIME_ERROR_IN_MATRIX_FITTING_PREVIEW_AND_REPORT_GRAPH //if(c1 >= 0 && c1 <= c2) . r2. nPlotType. r1. nPlotType. gl. vector<int> vnPlotIndices. GraphLayer& gl. //Range is not empty //-----. c1. dr. c2. int nPlot = -1.GetNumRanges().0560 SHOULD_NOT_PLOT_WEIGHT_UNLESS_COL_TYPE_IS_ERR //dr. } ///end SET_W_AS_ED_OF_DATARANGE_AND_PLOT_ERROR_BAR #else //#ifdef NLFTI_PARAMETERS_MANAGER //cpy 9/12/08 added this int plotDataRange(DataRange& dr. bool bCheckDup) ///end SHOULD_ALLOW_DUP_DATASET_PLOT_FOR_PREVIEW_SO_AS_TO_SAME_WITH_FIT_THINGS { if(!gl || !dr) return -1. ///end KEEP_CUSTOMIZATION_SETTINGS_FOR_SOURCE_DATA_PLOT_AFTER_RECALCULATE return nPlot. for(int nRange=0. nRange++) { int c1. r1. nRange < dr. ///end SHOULD_NOT_PLOT_WEIGHT_UNLESS_COL_TYPE_IS_ERR string strRange = strRangeName. r1. string strRangeName. gl. r2. uint nCntrl. /// Iris 02/13/2007 v8. int nStartPlot = -1. bool bIsWeight = 0 == lstrcmpi(strRangeName. vnPlotIndices) > 0 && vnPlotIndices.GetRange(nRange.AddPlot(drSubRange. c2.

//-----///Arvin 08/29/07 QA70-10073-P6 KEEP_CUSTOMIZATION_SETTINGS_FOR_SOURCE_DATA_PLOT_AFTER_RECALCULATE //int nPlot = gl. } } //-----. ///Cheney 2007-9-7 SHOULD_ALLOW_DUP_DATASET_PLOT_FOR_PREVIEW_SO_AS_TO_SAME_WITH_FIT_THINGS //if( check_has_plotted_in_graph(drSubRange. wksTemp.AddPlot(drSubRange. gl.GetType() == OKDATAOBJ_DESIGNATION_ERROR) ///Kyle 09/24/08 SET_NO_NEED_TO_LOCALIZE //strRange = _L("ED"). else nPlot = gl. gl.AddPlot(drSubRange. c1). nPlotType. vnPlotIndices) > 0 && vnPlotIndices. strRange = "ED". nPlotType. vector<int> vnPlotIndices. vnPlotIndices) > 0 && vnPlotIndices. c2). } #endif //#ifdef NLFTI_PARAMETERS_MANAGER //cpy 9/12/08 added this //---------. ///End SET_NO_NEED_TO_LOCALIZE } drSubRange. nCntrl).GetSize() >0 ) ///end SHOULD_ALLOW_DUP_DATASET_PLOT_FOR_PREVIEW_SO_AS_TO_SAME_WITH_FIT_THINGS nPlot = vnPlotIndices[0].CPY 1/8/07 SPARK_LINES_BROKEN_FOR_MIXED_COL .Add(strRange. int nPlot = -1. ///end KEEP_CUSTOMIZATION_SETTINGS_FOR_SOURCE_DATA_PLOT_AFTER_RECALCULATE return nPlot.Folger 03/18/08 QA80-11260 FIX_RUNTIME_ERROR_IN_MATRIX_FITTING_PREVIEW_AND_REPORT_GRAPH if ( !drSubRange ) drSubRange = dr.if(wksTemp && c1 >= 0 && c1 <= c2) //-----{ if(bIsWeight) { Column col(wksTemp. //If column is error. nCntrl). r1. c1. r2. change range name from "W" to "ED" and then plot graph with error bar if( col && col.GetSize() >0 ) if( bCheckDup && check_has_plotted_in_graph(drSubRange.

LPCSTR lpszSub) { //okutil_find_string_localization(strDest. 0 if false int is_col_all_text(const Worksheet& wks.PercentText() > 50) return 1. } int compare_no_case_localization(const string& strDest. return strDest. lpsz). } ///---Sim 11-05-2007 IMPROVE_LOCALIZATION_ISSUE /* int compare_localization(const string& strDest. if(OKCOLTYPE_TEXT == nColDataType) return 1. LPCSTR lpszString2. need to check if all text //------. int nColDataType = cc. 1 if true. LPCSTR lpsz) { //okutil_compare_no_case_localization(strDest.Find(lpszSub). lpszString2) ). lpszString2) ). if(dobj. bool bNoCaseSensitive) { if ( bNoCaseSensitive ) return ( 0 == lstrcmpi(lpszString1.// return <0 if err. LPCSTR lpsz) { //okutil_compare_localization(strDest.Columns(nCol). //------return 0. else return ( 0 == lstrcmp(lpszString1. } */ static bool _compare_string(LPCSTR lpszString1. } .Compare(lpsz). lpsz).CPY 5/8/2007 QA70-9735 OPTION_TO_TURN_OFF_CATEGORICAL_CONVERT DatasetObject dobj(cc). more complicated. int nCol) { Column cc = wks. lpszSub). return strDest. if(!cc) return -1. } int find_string_localization(const string& strDest.CompareNoCase(lpsz). if(nColDataType != OKCOLTYPE_TEXT_NUMERIC) return 0. // mixed data type.GetFormat(). return strDest.

LPCSTR lpszEnglishSub. } int find_string_localization(LPCSTR lpszLocalDest.SetSize(nXcs). double dMin = 0.bool compare_string_localization(LPCSTR lpszLocalDest.. bNoCaseSensitive) ) return true. int nPos. } return -1. . then get mean value of each part ( (dmin+dmax)/2 ) if(nNumNonMissing == 1) { double dValTemp = vInputx[nInputSize . if ( _compare_string(lpszLocalDest. ///---END IMPROVE_LOCALIZATION_ISSUE /// ML 12/17/2007 INITIALIZING_PEAK_POSITIONS_FOR_REPLICAS_FITTING /* ///Cheney 2007-11-20 INIT_XC_FOR_REPLICA_FITTING bool init_xc_for_replica_fitting(const vector& vInputx. return false. int nIndexMin = 0. dMax. int nInputSize = vInputx. vXc = NANUM //else if input data has only 1 non-missing value. dMax = 0.Find(lpszEnglishSub)) ) return nPos.1]. set vXc equal to it //else divide it as nXcs part.. if ( -1 < (nPos = strLocalDest.Find(GetLocalized(lpszEnglishSub))) ) return nPos. vector& vXc) { int nXcs = nReplica + 1. bNoCaseSensitive) ) return true. vXc. &nIndexMin). int nReplica. int nNumNonMissing = vInputx. if ( -1 < (nPos = strLocalDest. lpszEnglishSub. GetLocalized(lpszEnglishSub). vXc = NANUM. //if input data is all missing. LPCSTR lpszEnglishSub) { string strLocalDest(lpszLocalDest).GetMinMax( dMin. if(nXcs < 1 || nInputSize < 1) return false.GetSize(). bool bNoCaseSensitive) // = false { if ( _compare_string(lpszLocalDest.

GetSize(). } else if( nNumNonMissing > 1 ) { //trim missing vector vtempInput. int nInputSizeY = vInputY. nIndexMin). ii < nXcs.GetSize() . nInputSizeY). ///end REPLICA_AUTO_INIT_PARAMS_FOR_SURFACE_FIT .vXc = dValTemp. ii == nXcs-1? -1 : nSizeInc*(ii+1)-1 ). int nSizeInc = nTempInputSize / nXcs. vector& vXc. if(nNumNonMissing < nInputSize) //exist missing val { ASSERT(nIndexMin > 0). double rBaseline) ///Arvin 03/11/08 REPLICA_AUTO_INIT_PARAMS_FOR_SURFACE_FIT //Move to curve_utils.GetSubVector( vtempInput.1] + vtemp[0] ) / 2.GetSize(). } //loop to get each part's mean value ( (dmin+dmax)/2 ) int nTempInputSize = vtempInput. const vector& vInputY. vtempInput = vInputx. for(int ii = 0. if(numPeaksNeeded < 1 || nInputSize < 1) return false. } ///end INIT_XC_FOR_REPLICA_FITTING */ /// Cloud 12/18/07 TREAT_SIGN_OF_PEAK_AREA //bool init_xc_for_replica_fitting(const vector& vInputX. ///Arvin 03/11/08 REPLICA_AUTO_INIT_PARAMS_FOR_SURFACE_FIT if(!findPeakCtrlInfo) return false. double rBaseline.GetSubVector( vtemp. int nReplica. vtempInput.cpp /* bool init_xc_for_replica_fitting(const vector& vInputX. vector& vXc. int nInputSizeX = vInputX. int nReplica. vXc[ii] = ( vtemp[vtemp. vector* pvPeaksY) /// End TREAT_SIGN_OF_PEAK_AREA { int numPeaksNeeded = nReplica + 1. vInputx.GetSize(). int nInputSize = min(nInputSizeX. ii++) { vector vtemp. } } return true. nSizeInc*ii. const vector& vInputY.

/// Cloud 12/18/07 TREAT_SIGN_OF_PEAK_AREA //for (int ipeak = 0.Reorder(vnReorder). vector vYPeaks(nInputSize). vnPeakIndices. /// Cloud 12/18/07 TREAT_SIGN_OF_PEAK_AREA if (pvPeaksY != NULL) *pvPeaksY = vYPeaks. vXPeaks. vnReorder). vXc = NANUM.SetSize(nCount). ipeak < numPeaksToReturn. BOTH_DIRECTION). vXPeaks(nInputSize). //} for (int ipeak = 0. vector<uint> vnReorder(nCount).SetSize(numPeaksNeeded).rBaseline. } /// End TREAT_SIGN_OF_PEAK_AREA } } return true. ipeak++) { vXc[ipeak] = vXPeaks[ipeak]. vYPeaks.ipeak . // this is needed because ocmath_find_peaks_1st_derivative() seems to require the baseline to be 0 UINT nCount = nInputSize. /// End TREAT_SIGN_OF_PEAK_AREA vYPeaks = fabs(vYPeaks).1]. vInputX. vYPeaks. vector vInputYBaselineSubtracted.Sort(SORT_DESCENDING. vInputYBaselineSubtracted = vInputY . vInputYBaselineSubtracted. if (OE_NOERROR == nRet) { if (0 < nCount) { // Sort the peaks (I want to use the highest peaks): vYPeaks. ipeak++) //{ //vXc[ipeak] = vXPeaks[numPeaksToReturn . TRUE. /// End TREAT_SIGN_OF_PEAK_AREA int numPeaksToReturn = min(numPeaksNeeded. /// Cloud 12/18/07 TREAT_SIGN_OF_PEAK_AREA if (pvPeaksY != NULL) pvPeaksY->Reorder(vnReorder). ipeak < numPeaksToReturn. (int)nCount). vXPeaks. vXPeaks. vector<int> vnPeakIndices(nInputSize). int nRet = ocmath_find_peaks_1st_derivative(&nCount.SetSize(nCount).vXc. .

vector& vy. int nCursorType) // =NULL. lpcszMsg2). vy. lpcszTitle. LPCSTR lpcszMsg1. } /// end PA_PEAKS_ADD_DONE_DIALOG_WHEN_MODIFY /// Iris 4/23/2008 HIDE_INNER_OUTER_LIMITS_FOR_XYFIT_AND_PA static bool _is_data_from_matrix(GraphLayer& gl) { DataPlot dp = gl. TRUE). lpcszTitle). -1. NULL. int nPts. "Originlab\\GetPtsDlg". LPCSTR lpcszMsg2) typedef int (*FUNC_GRAPH_GET_PTS)(vector& vx. int nCursorType. LPCSTR lpcszMsg2). int nPts. pnPlotIndex). bool graph_done_window(LPCSTR lpcstrMsg. -1. return false. NULL /// CPY 04/02/08 OPTION_TO_AUTOSIZE_GETPOINTSDIALOG added bAutoResize { FUNC_GRAPH_DONE_WND pfn = Project.FindFunction("GetPtsDlg". lpcszMsg1. nPts. if(_pfn) /// Hong 03/26/08 v8/0832b PA_PEAKS_DEL_REQUIRE_FOLLOWING_DATA_SUPPORT //return _pfn(vx.DataPlots(). //int graph_get_points(vector& vx. if(!dp) . LPCSTR lpcszTitle. /// end PA_PEAKS_DEL_REQUIRE_FOLLOWING_DATA_SUPPORT return -2. TRUE). return _pfn(vx. } */ ///end REPLICA_AUTO_INIT_PARAMS_FOR_SURFACE_FIT /// end INITIALIZING_PEAK_POSITIONS_FOR_REPLICAS_FITTING //----. /// end PA_PEAKS_DEL_REQUIRE_FOLLOWING_DATA_SUPPORT { FUNC_GRAPH_GET_PTS _pfn = Project. BOOL bAutoResize. int* pnPlotIndex). LPCSTR lpcszTitle). BOOL bAutoResize) // = true. } //----/// Hong 03/26/08 QA80-11323 v8. LPCSTR lpcszTitle. LPCSTR lpcszMsg1. lpcszMsg1. int* pnPlotIndex. LPCSTR lpcszTitle. LPCSTR lpcszMsg2. int nCursorType. LPCSTR lpcszMsg1. int nPts.return false. bAutoResize. vector& vy.CPY 2/3/2008 QA70-11050 GET_PTS_NEED_USER_DECIDE_TERMINATION /// Hong 03/26/08 v8/0832b PA_PEAKS_DEL_REQUIRE_FOLLOWING_DATA_SUPPORT //typedef int (*FUNC_GRAPH_GET_PTS)(vector& vx. nPts.0832b PA_PEAKS_ADD_DONE_DIALOG_WHEN_MODIFY typedef bool (*FUNC_GRAPH_DONE_WND)(LPCSTR lpcstrMsg.FindFunction("GetDoneDlg". NULL. int nCursorType. vy. int graph_get_points(vector& vx. vector& vy. LPCSTR lpcszMsg1. lpcszMsg2. vector& vy. nCursorType. int nPts. if ( pfn ) return pfn(lpcstrMsg. LPCSTR lpcszMsg2. NULL. nCursorType.

"fitmatrix")) nFitType = NLFIT_MATRIX_FITTING. if(ocu_load_err_msg_str(nErrCode.Find("%d")) >= 0) { int nStrPos = str1. } return nFitType. } int { get_fit_type(string& strClassName) int nFitType = NLFIT_GENERAL_XY_FITTING. if(nFitType != NLFIT_GENERAL_XY_FITTING && gl && _is_data_from_matrix(gl)) { nFitType = NLFIT_MATRIX_FITTING. if(0 == nErrCode) return lpcsz. lpcszClass always be "fitsurface" //because in the menu. /// ML 10/1/2007 QA70-10268 MATRIX_FIT_FROM_CONTOUR_PLOT //if(IDM_PLOT_3D_MESH == nPlotType) // bFromMatrix = true. /// end MATRIX_FIT_FROM_CONTOUR_PLOT return bFromMatrix.Find("%s"). int nPlotType = dp. "fitsurface")) nFitType = NLFIT_XYZ_FITTING.GetPlotType().return false.Folger 08/29/08 QA80-12120 CENTRALIZE_XF_ERROR_MSEEAGE_PARSING_CODE string xf_load_err_msg(int nErrCode. &str1)) { int nNumPos. only could select surface fit GraphLayer gl = Project. if((nNumPos = str1. bool bFromMatrix = false. string str1.ActiveLayer(). //if do 3d fitting from graph. else if(!lstrcmp(strClassName. if(IDM_PLOT_3D_MESH == nPlotType || IDM_PLOT_CONTOUR == nPlotType) bFromMatrix = true. } ///end HIDE_INNER_OUTER_LIMITS_FOR_XYFIT_AND_PA //-----. LPCSTR lpcsz) { string strMsg. if(nStrPos >= 0) { . if from graph. if(!lstrcmp(strClassName. strClassName = "fitmatrix".

strMsg.GetTokens(vs. LPSTR lpstr = strMsg. } . } } else if(str1. else { if(nStrPos > nNumPos) { int nCode = atoi(vs[0]). '|') != 2) ocu_load_msg_str(CER_USERMSG_NOT_CORRECT_SEPARATED. &strMsg. lpstr2) : lpcszErrMsg. bShowBox. if(strArgs. nCode). string strMsg = lpcszErrMsg.Find("%s") >= 0) strMsg. } else { ocu_load_msg_str(CER_ERR_MSG_NOT_FOUND. } } } else { int nCode = atoi(lpcsz).GetBuffer(strMsg. strMsg. vs[1]). else strMsg = str1. uType). nCode.Format(str1.Format(str1. &strMsg. LPSTR lpstr2 = is_str_numeric_integer_with_args(lpstr.GetLength()). } int xf_warning_msg_box(LPCSTR lpcszErrMsg.ReleaseBuffer(). lpcsz). cMsgType. UINT uType/* = MB_OK*/) { int nErrCode = -1. NULL. return warning_msg_box(lpstr2 ? xf_load_err_msg(nErrCode. nCode).Format(str1. vs[0]. char cMsgType/* = 'W'*/.Format(str1.vector<string> vs. lpcsz). strMsg. bool bShowBox. strMsg. &nErrCode). string strArgs = lpcsz. } return strMsg. &nErrCode). } else { int nCode = atoi(vs[1]).

//-----.End CENTRALIZE_XF_ERROR_MSEEAGE_PARSING_CODE .