'Facing "attribute error:'dict' object has no attribute 'dtype' " in google colab

I have tried "Data Augmentation" for a numeric datatset. Augmentation is successful but while exporting the augmented dataset from google colab I am facing the Attribute Error:'dict' object has no attribute 'dtype' The part of the code and the error message is given below:


    A=[]    
    A.append(df)   
    for _ in range(5):   
      for _,row in df.iterrows():     
        temp = {   
            'PERCENT_PUB_DATA': row['PERCENT_PUB_DATA'] + np.random.uniform(percst),
            'ACCESS_TO_PUB_DATA':row['ACCESS_TO_PUB_DATA'] + np.random.uniform(accest),
            'COUPLING_BETWEEN_OBJECTS':row['COUPLING_BETWEEN_OBJECTS'] + np.random.uniform(coupst),
            'DEPTH':row['DEPTH'] + np.random.uniform(deptst),
            'LACK_OF_COHESION_OF_METHODS':row['LACK_OF_COHESION_OF_METHODS'] + np.random.uniform(lackst),
            'NUM_OF_CHILDREN':row['NUM_OF_CHILDREN'] + np.random.uniform(numost),
            'DEP_ON_CHILD':row['DEP_ON_CHILD'] + np.random.uniform(depost),
            'FAN_IN':row['FAN_IN'] + np.random.uniform(fanist),
            'RESPONSE_FOR_CLASS':row['RESPONSE_FOR_CLASS'] + np.random.uniform(respst),
            'WEIGHTED_METHODS_PER_CLASS':row['WEIGHTED_METHODS_PER_CLASS'] + np.random.uniform(weigst),
            'minLOC_BLANK':row['minLOC_BLANK'] + np.random.uniform(blankst),
            'minBRANCH_COUNT':row['minBRANCH_COUNT'] + np.random.uniform(branchst),
            'minLOC_CODE_AND_COMMENT':row['minLOC_CODE_AND_COMMENT'] + np.random.uniform(codest),
            'minLOC_COMMENTS':row['minLOC_COMMENTS'] + np.random.uniform(comentsst),
            'minCYCLOMATIC_COMPLEXITY':row['minCYCLOMATIC_COMPLEXITY'] + np.random.uniform(cyclost),
            'minDESIGN_COMPLEXITY':row['minDESIGN_COMPLEXITY'] + np.random.uniform(desist),
            'minESSENTIAL_COMPLEXITY':row['minESSENTIAL_COMPLEXITY'] + np.random.uniform(essest),
            'minLOC_EXECUTABLE':row['minLOC_EXECUTABLE'] + np.random.uniform(execst),
            'minHALSTEAD_CONTENT':row['minHALSTEAD_CONTENT'] + np.random.uniform(contst),
            'minHALSTEAD_DIFFICULTY':row['minHALSTEAD_DIFFICULTY'] + np.random.uniform(diffest),
            'minHALSTEAD_EFFORT':row['minHALSTEAD_EFFORT'] + np.random.uniform(effortsst),
            'minHALSTEAD_ERROR_EST':row['minHALSTEAD_ERROR_EST'] + np.random.uniform(errost),
            'minHALSTEAD_LENGTH':row['minHALSTEAD_LENGTH'] + np.random.uniform(lengtst),
            'minHALSTEAD_LEVEL':row['minHALSTEAD_LEVEL'] + np.random.uniform(levst),
            'minHALSTEAD_PROG_TIME':row['minHALSTEAD_PROG_TIME'] + np.random.uniform(progst),
            'minHALSTEAD_VOLUME':row['minHALSTEAD_VOLUME'] + np.random.uniform(volust),
            'minNUM_OPERANDS':row['minNUM_OPERANDS'] + np.random.uniform(operanst),
            'minNUM_OPERATORS':row['minNUM_OPERATORS'] + np.random.uniform(operatst),
            'minNUM_UNIQUE_OPERANDS':row['minNUM_UNIQUE_OPERANDS'] + np.random.uniform(uoperandst),
            'minNUM_UNIQUE_OPERATORS' :row['minNUM_UNIQUE_OPERATORS'] + np.random.uniform(uoperatorst),
            'minLOC_TOTAL' :row['minLOC_TOTAL'] + np.random.uniform(totst),
            'maxLOC_BLANK' :row['maxLOC_BLANK'] + np.random.uniform(mblankst),
            'maxBRANCH_COUNT' :row['maxBRANCH_COUNT'] + np.random.uniform(branchcountst),
            'maxLOC_CODE_AND_COMMENT' :row['maxLOC_CODE_AND_COMMENT'] + np.random.uniform(mcodest),
            'maxLOC_COMMENTS' :row['maxLOC_COMMENTS'] + np.random.uniform(mcommentst),
            'maxCYCLOMATIC_COMPLEXITY' :row['maxCYCLOMATIC_COMPLEXITY'] + np.random.uniform(mcyclost),
            'maxDESIGN_COMPLEXITY' :row['maxDESIGN_COMPLEXITY'] + np.random.uniform(mdesist),
            'maxESSENTIAL_COMPLEXITY' :row['maxESSENTIAL_COMPLEXITY'] + np.random.uniform(messenst),
            'maxLOC_EXECUTABLE' :row['maxLOC_EXECUTABLE'] + np.random.uniform(mlocst),
            'maxHALSTEAD_CONTENT' :row['maxHALSTEAD_CONTENT'] + np.random.uniform(mhalconst),
            'maxHALSTEAD_DIFFICULTY' :row['maxHALSTEAD_DIFFICULTY'] + np.random.uniform(mhaldiffst),
            'maxHALSTEAD_EFFORT' :row['maxHALSTEAD_EFFORT'] + np.random.uniform(mhaleffst),
            'maxHALSTEAD_ERROR_EST' :row['maxHALSTEAD_ERROR_EST'] + np.random.uniform(mhalerrst),
            'maxHALSTEAD_LENGTH' :row['maxHALSTEAD_LENGTH'] + np.random.uniform(mhallenst),
            'maxHALSTEAD_LEVEL' :row['maxHALSTEAD_LEVEL'] + np.random.uniform(mhallevst),
            'maxHALSTEAD_PROG_TIME' :row['maxHALSTEAD_PROG_TIME'] + np.random.uniform(mhalpst),
            'maxHALSTEAD_VOLUME' :row['maxHALSTEAD_VOLUME'] + np.random.uniform(mhalvst),
            'maxNUM_OPERANDS' :row['maxNUM_OPERANDS'] + np.random.uniform(mnumopst),
            'maxNUM_OPERATORS' :row['maxNUM_OPERATORS'] + np.random.uniform(mnopst),
            'maxNUM_UNIQUE_OPERANDS':row['maxNUM_UNIQUE_OPERANDS'] + np.random.uniform(muopst),
            'maxNUM_UNIQUE_OPERATORS':row['maxNUM_UNIQUE_OPERATORS'] + np.random.uniform(muoprst),
            'maxLOC_TOTAL':row['maxLOC_TOTAL'] + np.random.uniform(mloctst),
            'avgLOC_BLANK' :row['avgLOC_BLANK'] + np.random.uniform(alocbst),
            'avgBRANCH_COUNT' :row['avgBRANCH_COUNT'] + np.random.uniform(abcst),
            'avgLOC_CODE_AND_COMMENT' :row['avgLOC_CODE_AND_COMMENT'] + np.random.uniform(aloccodest),
            'avgLOC_COMMENTS' :row['avgLOC_COMMENTS'] + np.random.uniform(aloccommst),
            'avgCYCLOMATIC_COMPLEXITY' :row['avgCYCLOMATIC_COMPLEXITY'] + np.random.uniform(acyclost),
            'avgDESIGN_COMPLEXITY' :row['avgDESIGN_COMPLEXITY'] + np.random.uniform(adesigst),
            'avgESSENTIAL_COMPLEXITY' :row['avgESSENTIAL_COMPLEXITY'] + np.random.uniform(aessest),
            'avgLOC_EXECUTABLE' :row['avgLOC_EXECUTABLE'] + np.random.uniform(alocexest),
            'avgHALSTEAD_CONTENT' :row['avgHALSTEAD_CONTENT'] + np.random.uniform(ahalconst),
            'avgHALSTEAD_DIFFICULTY' :row['avgHALSTEAD_DIFFICULTY'] + np.random.uniform(ahaldifficst),
            'avgHALSTEAD_EFFORT' :row['avgHALSTEAD_EFFORT'] + np.random.uniform(ahaleffortst),
            'avgHALSTEAD_ERROR_EST' :row['avgHALSTEAD_ERROR_EST'] + np.random.uniform(ahalestst),
            'avgHALSTEAD_LENGTH' :row['avgHALSTEAD_LENGTH'] + np.random.uniform(ahallenst),
            'avgHALSTEAD_LEVEL' :row['avgHALSTEAD_LEVEL'] + np.random.uniform(ahallevst),
            'avgHALSTEAD_PROG_TIME' :row['avgHALSTEAD_PROG_TIME'] + np.random.uniform(ahalprogst),
            'avgHALSTEAD_VOLUME' :row['avgHALSTEAD_VOLUME'] + np.random.uniform(ahalvolst),
            'avgNUM_OPERANDS' :row['avgNUM_OPERANDS'] + np.random.uniform(ahalnumost),
            'avgNUM_OPERATORS' :row['avgNUM_OPERATORS'] + np.random.uniform(ahalnumopst),
            'avgNUM_UNIQUE_OPERANDS' :row['avgNUM_UNIQUE_OPERANDS'] + np.random.uniform(anumoperanst),
            'avgNUM_UNIQUE_OPERATORS' :row['avgNUM_UNIQUE_OPERATORS'] + np.random.uniform(anumuniquest),
            'avgLOC_TOTAL' :row['avgLOC_TOTAL'] + np.random.uniform(aloctst),
            'sumLOC_BLANK' :row['sumLOC_BLANK'] + np.random.uniform(alocbst),
            'sumBRANCH_COUNT' :row['sumBRANCH_COUNT'] + np.random.uniform(sumbst),
            'sumLOC_CODE_AND_COMMENT' :row['sumLOC_CODE_AND_COMMENT'] + np.random.uniform(sunlst),
            'sumLOC_COMMENTS' :row['sumLOC_COMMENTS'] + np.random.uniform(sumlcommst),
            'sumCYCLOMATIC_COMPLEXITY' :row['sumCYCLOMATIC_COMPLEXITY'] + np.random.uniform(sumcyclost),
            'sumDESIGN_COMPLEXITY' :row['sumDESIGN_COMPLEXITY'] + np.random.uniform(sumdesist),
            'sumESSENTIAL_COMPLEXITY' :row['sumESSENTIAL_COMPLEXITY'] + np.random.uniform(sumessst),
            'sumLOC_EXECUTABLE' :row['sumLOC_EXECUTABLE'] + np.random.uniform(sumexst),
            'sumHALSTEAD_CONTENT' :row['sumHALSTEAD_CONTENT'] + np.random.uniform(sumconst),
            'sumHALSTEAD_DIFFICULTY' :row['sumHALSTEAD_DIFFICULTY'] + np.random.uniform(sumdiffest),
            'sumHALSTEAD_EFFORT' :row['sumHALSTEAD_EFFORT'] + np.random.uniform(sumeffst),
            'sumHALSTEAD_ERROR_EST' :row['sumHALSTEAD_ERROR_EST'] + np.random.uniform(sumerrost),
            'sumHALSTEAD_LENGTH' :row['sumHALSTEAD_LENGTH'] + np.random.uniform(sumlengst),
            'sumHALSTEAD_LEVEL' :row['sumHALSTEAD_LEVEL'] + np.random.uniform(sumlevst),
            'sumHALSTEAD_PROG_TIME' :row['sumHALSTEAD_PROG_TIME'] + np.random.uniform(sumprogst),
            'sumHALSTEAD_VOLUME' :row['sumHALSTEAD_VOLUME'] + np.random.uniform(sumvolust),
            'sumNUM_OPERANDS' :row['sumNUM_OPERANDS'] + np.random.uniform(sumoperst),
            'sumNUM_OPERATORS' :row['sumNUM_OPERATORS'] + np.random.uniform(sumoperandst),
            'sumNUM_UNIQUE_OPERANDS' :row['sumNUM_UNIQUE_OPERANDS'] + np.random.uniform(sumuopst),
            'sumNUM_UNIQUE_OPERATORS' :row['sumNUM_UNIQUE_OPERATORS'] + np.random.uniform(sumuoprst),
            'sumLOC_TOTAL' :row['sumLOC_TOTAL'] + np.random.uniform(sumtolst),
            'DEFECTT' :row['DEFECTT'] + np.random.uniform(deftst),
            'DEFECT5' :row['DEFECT5'] + np.random.uniform(defest),
            'NUMDEFECTS' :row['NUMDEFECTS'] + np.random.uniform(ndefst)
                                                                          
         } 
    
    
        A.append(temp)
      
    
        print(len(A), "dataset created") 
    df=pd. DataFrame(A)  
    df.to_csv("A1.csv")      

The output is as follows

726 dataset created


AttributeError Traceback (most recent call last)

in () 1 print(len(A), "dataset created") ----> 2 df=pd. DataFrame(A) 3 df.to_csv("A1.csv")

5 frames

/usr/local/lib/python3.7/dist-packages/pandas/core/dtypes/cast.py in maybe_convert_platform(values) 122 arr = values 123 --> 124 if arr.dtype == object: 125 arr = cast(np.ndarray, arr) 126 arr = lib.maybe_convert_objects(arr)

AttributeError: 'dict' object has no attribute 'dtype'

Any help is appreciated Thank You!



Solution 1:[1]

A=[]
if you use curly braces {} rather than block braces [] then the problem

AttributeError: 'dict' object has no attribute 'dtype'

will be solved.

Use this: A={}

Sources

This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.

Source: Stack Overflow

Solution Source
Solution 1 thirdDeveloper