File packages/item_upload.py changed (mode: 100644) (index 730dbcf..a916f3e) |
... |
... |
import sys |
3 |
3 |
import re |
import re |
4 |
4 |
import chardet |
import chardet |
5 |
5 |
import collections |
import collections |
|
6 |
|
import colorama |
|
7 |
|
import inspect |
6 |
8 |
from sys import exit |
from sys import exit |
7 |
9 |
from packages import barcode, amazon_data_upload, price_upload |
from packages import barcode, amazon_data_upload, price_upload |
8 |
10 |
|
|
|
... |
... |
from packages import barcode, amazon_data_upload, price_upload |
10 |
12 |
class WrongEncodingException(Exception):
    """Raised when an input file does not have the expected encoding."""
    pass
|
|
|
15 |
|
# colorama is optional at runtime: probe its attributes HERE, inside the
# try, so the plain-text fallbacks below are actually selected when the
# package is absent or incomplete.  Wrapping only the `def` statements in
# try/except (as before) never triggers the except branch — a function
# body is not executed at definition time, so the AttributeError would
# only surface later, at call time, where nothing catches it.
# NameError is caught as well so the fallbacks are used when the
# top-of-file `import colorama` itself was skipped or failed.
try:
    _COLOR_RED = colorama.Fore.RED
    _COLOR_YELLOW = colorama.Fore.YELLOW
    _COLOR_RESET = colorama.Style.RESET_ALL

    def errorPrint(msg, err, linenumber):
        """Print an error message in red.

        Parameter:
            msg [str]        : short description of the failure
            err              : the caught exception (or detail string)
            linenumber [int] : source line to report
        """
        print(_COLOR_RED)
        print("ERROR:\nline:{0}\t{1}\tError:{2}".format(linenumber, msg, err))
        print(_COLOR_RESET)

    def warnPrint(msg, err, linenumber):
        """Print a warning message in yellow.

        Parameter:
            msg [str]        : short description of the problem
            err              : the caught exception (or detail string)
            linenumber [int] : source line to report
        """
        print(_COLOR_YELLOW)
        print("WARNING:\nline:{0}\t{1}\tWarning:{2}"
              .format(linenumber, msg, err))
        print(_COLOR_RESET)
except (AttributeError, NameError):
    def errorPrint(msg, err, linenumber):
        """Print an error message (colorama unavailable: no color)."""
        print("ERROR:\nline:{0}\t{1}\tError:{2}".format(linenumber, msg, err))

    def warnPrint(msg, err, linenumber):
        """Print a warning message (colorama unavailable: no color)."""
        print("WARNING:\nline:{0}\t{1}\tWarning:{2}"
              .format(linenumber, msg, err))
13 |
34 |
|
|
14 |
35 |
def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, filename): |
def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, filename): |
15 |
36 |
# The column headers for the output file as expected from the |
# The column headers for the output file as expected from the |
|
... |
... |
def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f |
53 |
74 |
# Get sets of all colors and sizes for each parent |
# Get sets of all colors and sizes for each parent |
54 |
75 |
# to find if there are some with only one attribute value for all childs |
# to find if there are some with only one attribute value for all childs |
55 |
76 |
color_size_sets = {} |
color_size_sets = {} |
56 |
|
color_size_sets = find_similar_attr(flatfile) |
|
|
77 |
|
color_size_sets = findSimilarAttr(flatfile) |
57 |
78 |
|
|
58 |
79 |
# PACKAGE PROPERTIES |
# PACKAGE PROPERTIES |
59 |
|
package_properties = get_properties(flatfile) |
|
|
80 |
|
package_properties = getProperties(flatfile) |
60 |
81 |
group_parent = '' |
group_parent = '' |
61 |
82 |
|
|
62 |
83 |
try: |
try: |
|
... |
... |
def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f |
88 |
109 |
position = 0 |
position = 0 |
89 |
110 |
if(row['parent_child'] == 'child'): |
if(row['parent_child'] == 'child'): |
90 |
111 |
isParent = False |
isParent = False |
91 |
|
attributes = get_attributes(dataset=row, |
|
|
112 |
|
attributes = getAttributes(dataset=row, |
92 |
113 |
sets=color_size_sets) |
sets=color_size_sets) |
93 |
114 |
if(group_parent and row['parent_sku'] == group_parent): |
if(group_parent and row['parent_sku'] == group_parent): |
94 |
115 |
position += 1 |
position += 1 |
95 |
116 |
except Exception as err: |
except Exception as err: |
96 |
|
print("Error @ attribute setting, line:{0}, err:{1}" |
|
97 |
|
.format(sys.exc_info()[2].tb_lineno, err)) |
|
|
117 |
|
warnPrint("Attribute setting failed", err, |
|
118 |
|
sys.exc_info()[2].tb_lineno) |
98 |
119 |
try: |
try: |
99 |
120 |
values = [ |
values = [ |
100 |
121 |
row['parent_sku'], row['item_sku'], |
row['parent_sku'], row['item_sku'], |
|
... |
... |
def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f |
133 |
154 |
.format(sys.exc_info()[2].tb_lineno, err)) |
.format(sys.exc_info()[2].tb_lineno, err)) |
134 |
155 |
Data[row['item_sku']] = collections.OrderedDict(zip(column_names, values)) |
Data[row['item_sku']] = collections.OrderedDict(zip(column_names, values)) |
135 |
156 |
except KeyError as err: |
except KeyError as err: |
136 |
|
print("Error reading file\nline:{0}err:{1}" |
|
137 |
|
.format(sys.exc_info()[2].tb_lineno, err)) |
|
|
157 |
|
errorPrint("Reading file failed", err, |
|
158 |
|
sys.exc_info()[2].tb_lineno) |
138 |
159 |
return row['item_sku'] |
return row['item_sku'] |
139 |
160 |
|
|
140 |
161 |
# open the intern number csv to get the item ID |
# open the intern number csv to get the item ID |
|
... |
... |
def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f |
160 |
181 |
Data[row]['ASIN-type'] = barcode_data[row]['ASIN-type'] |
Data[row]['ASIN-type'] = barcode_data[row]['ASIN-type'] |
161 |
182 |
Data[row]['ASIN-value'] = barcode_data[row]['ASIN-value'] |
Data[row]['ASIN-value'] = barcode_data[row]['ASIN-value'] |
162 |
183 |
except Exception as err: |
except Exception as err: |
163 |
|
print("ERROR @ Barcode Part for {0}.\n{1}.\n".format(row, err)) |
|
|
184 |
|
errorPrint("Barcode part for "+row, err, |
|
185 |
|
sys.exc_info()[2].tb_lineno) |
164 |
186 |
|
|
165 |
187 |
# Include the amazonsku |
# Include the amazonsku |
166 |
188 |
sku_data = amazon_data_upload.amazonSkuUpload(flatfile) |
sku_data = amazon_data_upload.amazonSkuUpload(flatfile) |
|
... |
... |
def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f |
173 |
195 |
Data[row]['amazon_sku'] = sku_data[row]['SKU'] |
Data[row]['amazon_sku'] = sku_data[row]['SKU'] |
174 |
196 |
Data[row]['amazon_parentsku'] = sku_data[row]['ParentSKU'] |
Data[row]['amazon_parentsku'] = sku_data[row]['ParentSKU'] |
175 |
197 |
except Exception as err: |
except Exception as err: |
176 |
|
print("ERROR @ SKU Part for {0}.\n{1}.\n".format(row, err)) |
|
|
198 |
|
errorPrint("SKU part for "+row, err, |
|
199 |
|
sys.exc_info()[2].tb_lineno) |
177 |
200 |
|
|
178 |
201 |
# Include the amazonsku |
# Include the amazonsku |
179 |
202 |
ama_data = amazon_data_upload.amazonDataUpload(flatfile) |
ama_data = amazon_data_upload.amazonDataUpload(flatfile) |
|
... |
... |
def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f |
185 |
208 |
Data[row]['fba-enabled'] = ama_data[row]['ItemAmazonFBA'] |
Data[row]['fba-enabled'] = ama_data[row]['ItemAmazonFBA'] |
186 |
209 |
Data[row]['fba-shipping'] = ama_data[row]['ItemShippingWithAmazonFBA'] |
Data[row]['fba-shipping'] = ama_data[row]['ItemShippingWithAmazonFBA'] |
187 |
210 |
except Exception as err: |
except Exception as err: |
188 |
|
print("ERROR @ AmazonData Part for {0}.\n{1}.\n".format(row, err)) |
|
|
211 |
|
errorPrint("AmazonData part for "+row, err, |
|
212 |
|
sys.exc_info()[2].tb_lineno) |
189 |
213 |
|
|
190 |
214 |
# Include the price |
# Include the price |
191 |
215 |
price_data = price_upload.priceUpload(flatfile) |
price_data = price_upload.priceUpload(flatfile) |
|
... |
... |
def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f |
199 |
223 |
Data[row]['webshop-price'] = price_data[row]['webshop'] |
Data[row]['webshop-price'] = price_data[row]['webshop'] |
200 |
224 |
Data[row]['etsy-price'] = price_data[row]['etsy'] |
Data[row]['etsy-price'] = price_data[row]['etsy'] |
201 |
225 |
except Exception as err: |
except Exception as err: |
202 |
|
print("ERROR @ Price Part for {0}.\n{1}.\n".format(row, err)) |
|
|
226 |
|
errorPrint("Price part for "+row, err, |
|
227 |
|
sys.exc_info()[2].tb_lineno) |
203 |
228 |
|
|
204 |
229 |
# Write Data into new CSV for Upload |
# Write Data into new CSV for Upload |
205 |
230 |
# OUTPUT |
# OUTPUT |
206 |
231 |
# -------------------------------------------------------------- |
# -------------------------------------------------------------- |
207 |
232 |
|
|
208 |
233 |
# Sort the dictionary to make sure that the parents are the first variant of each item |
# Sort the dictionary to make sure that the parents are the first variant of each item |
209 |
|
sorted_Data = sort_Products(Data) |
|
|
234 |
|
sorted_Data = sortProducts(Data) |
210 |
235 |
|
|
211 |
236 |
barcode.writeCSV(sorted_Data, "item", column_names, folder, filename) |
barcode.writeCSV(sorted_Data, "item", column_names, folder, filename) |
212 |
237 |
except UnicodeDecodeError as err: |
except UnicodeDecodeError as err: |
213 |
238 |
print("Decode Error at line: {0}, err: {1}".format(sys.exc_info()[2].tb_lineno, err)) |
print("Decode Error at line: {0}, err: {1}".format(sys.exc_info()[2].tb_lineno, err)) |
|
239 |
|
errorPrint("decoding problem", err, |
|
240 |
|
sys.exc_info()[2].tb_lineno) |
214 |
241 |
print("press ENTER to continue..") |
print("press ENTER to continue..") |
215 |
242 |
input() |
input() |
216 |
243 |
sys.exit() |
sys.exit() |
|
... |
... |
def itemPropertyUpload(flatfile, folder, filename): |
260 |
287 |
use_names = [i for i in property_names if i in [*row]] |
use_names = [i for i in property_names if i in [*row]] |
261 |
288 |
values = [row[i] for i in use_names] |
values = [row[i] for i in use_names] |
262 |
289 |
except ValueError as err: |
except ValueError as err: |
263 |
|
print("In property Upload: One of the values wasn't found : ", err) |
|
|
290 |
|
warnPrint("No Value for "+i, err, |
|
291 |
|
sys.exc_info()[2].tb_lineno) |
264 |
292 |
|
|
265 |
293 |
# Check for empty values |
# Check for empty values |
266 |
294 |
properties[row['item_sku']] = dict(zip(use_names, values)) |
properties[row['item_sku']] = dict(zip(use_names, values)) |
|
... |
... |
def itemPropertyUpload(flatfile, folder, filename): |
275 |
303 |
|
|
276 |
304 |
Data[row + prop] = dict(zip(column_names, values)) |
Data[row + prop] = dict(zip(column_names, values)) |
277 |
305 |
except KeyError as kerr: |
except KeyError as kerr: |
278 |
|
print("ERROR: Key {0} was not found in the flatfile" |
|
279 |
|
.format(kerr)) |
|
|
306 |
|
errorPrint("Key was not found in the flatfile", kerr, |
|
307 |
|
sys.exc_info()[2].tb_lineno) |
280 |
308 |
|
|
281 |
309 |
|
|
282 |
310 |
barcode.writeCSV(Data, "Item_Merkmale", column_names, folder, filename) |
barcode.writeCSV(Data, "Item_Merkmale", column_names, folder, filename) |
283 |
311 |
|
|
284 |
|
def get_properties(flatfile): |
|
|
312 |
|
def getProperties(flatfile): |
285 |
313 |
|
|
286 |
314 |
properties = {'length':0, |
properties = {'length':0, |
287 |
315 |
'width':0, |
'width':0, |
|
... |
... |
def get_properties(flatfile): |
316 |
344 |
properties[ 'width' ] = int(float(row['package_width'])) |
properties[ 'width' ] = int(float(row['package_width'])) |
317 |
345 |
properties[ 'weight' ] = int(float(row['package_weight'])) |
properties[ 'weight' ] = int(float(row['package_weight'])) |
318 |
346 |
except Exception as err: |
except Exception as err: |
319 |
|
print("Error @ setting values: line:{0}, err:{1}" |
|
320 |
|
.format(sys.exc_info()[2].tb_lineno, err)) |
|
|
347 |
|
errorPrint("getProperties setting values failed", err, |
|
348 |
|
sys.exc_info()[2].tb_lineno) |
321 |
349 |
|
|
322 |
350 |
except ValueError as err: |
except ValueError as err: |
323 |
|
print(err) |
|
324 |
|
print("/nPlease copy the values for height, length, width", |
|
325 |
|
"and weight\nfrom the children to the parent", |
|
326 |
|
"variation in the flatfile.\n") |
|
|
351 |
|
errorPrint("Parent has no package measurements", err, |
|
352 |
|
sys.exc_info()[2].tb_lineno) |
327 |
353 |
exit() |
exit() |
328 |
|
except Exception as err: |
|
329 |
|
print("Error @ setting values: line:{0}, err:{1}" |
|
330 |
|
.format(sys.exc_info()[2].tb_lineno, err)) |
|
331 |
354 |
|
|
332 |
355 |
return properties |
return properties |
333 |
356 |
|
|
334 |
|
def get_attributes(dataset, sets): |
|
|
357 |
|
def getAttributes(dataset, sets): |
335 |
358 |
|
|
336 |
359 |
output_string = '' |
output_string = '' |
337 |
360 |
try: |
try: |
|
... |
... |
def get_attributes(dataset, sets): |
343 |
366 |
dataset['parent_sku'], ','.join([*sets]) |
dataset['parent_sku'], ','.join([*sets]) |
344 |
367 |
)) |
)) |
345 |
368 |
except Exception as err: |
except Exception as err: |
346 |
|
print("Error @ adding color to string (get_attributes)\nerr:{0}" |
|
347 |
|
.format(err)) |
|
|
369 |
|
errorPrint("Adding of color attribute failed", err, |
|
370 |
|
sys.exc_info()[2].tb_lineno) |
348 |
371 |
try: |
try: |
349 |
372 |
if(len(sets[dataset['parent_sku']]['size']) > 1): |
if(len(sets[dataset['parent_sku']]['size']) > 1): |
350 |
373 |
if(not(output_string)): |
if(not(output_string)): |
|
... |
... |
def get_attributes(dataset, sets): |
352 |
375 |
else: |
else: |
353 |
376 |
output_string = output_string + ';size_name:' + dataset['size_name'] |
output_string = output_string + ';size_name:' + dataset['size_name'] |
354 |
377 |
except Exception as err: |
except Exception as err: |
355 |
|
print("Error @ adding size to string\nerr:{0}" |
|
356 |
|
.format(err)) |
|
|
378 |
|
errorPrint("Adding of size attribute failed", err, |
|
379 |
|
sys.exc_info()[2].tb_lineno) |
357 |
380 |
return output_string |
return output_string |
358 |
381 |
|
|
359 |
|
def find_similar_attr(flatfile): |
|
|
382 |
|
def findSimilarAttr(flatfile): |
360 |
383 |
|
|
361 |
384 |
Data = {} |
Data = {} |
362 |
385 |
|
|
|
... |
... |
def find_similar_attr(flatfile): |
377 |
400 |
Data[row['parent_sku']]['size'].add(row['size_name']) |
Data[row['parent_sku']]['size'].add(row['size_name']) |
378 |
401 |
return Data |
return Data |
379 |
402 |
|
|
380 |
|
def sort_Products(dataset): |
|
|
403 |
|
def sortProducts(dataset): |
381 |
404 |
item_list = dataset.items() |
item_list = dataset.items() |
382 |
405 |
new_dict = collections.OrderedDict() |
new_dict = collections.OrderedDict() |
383 |
406 |
parent_dict = collections.OrderedDict() |
parent_dict = collections.OrderedDict() |
|
... |
... |
def sort_Products(dataset): |
391 |
414 |
# add the parent to the new dict |
# add the parent to the new dict |
392 |
415 |
new_dict[item[0]] = item[1] |
new_dict[item[0]] = item[1] |
393 |
416 |
# get all the children and update the itemlist without them |
# get all the children and update the itemlist without them |
394 |
|
child_dict = search_child(item_list=item_list, parent=item[0]) |
|
|
417 |
|
child_dict = searchChild(item_list=item_list, parent=item[0]) |
395 |
418 |
# add each child to the new dict after the parent |
# add each child to the new dict after the parent |
396 |
419 |
for child in child_dict: |
for child in child_dict: |
397 |
420 |
new_dict[child] = child_dict[child] |
new_dict[child] = child_dict[child] |
398 |
421 |
|
|
399 |
422 |
return new_dict |
return new_dict |
400 |
423 |
|
|
401 |
|
def search_child(item_list, parent): |
|
|
424 |
|
def searchChild(item_list, parent): |
402 |
425 |
child_dict = collections.OrderedDict() |
child_dict = collections.OrderedDict() |
403 |
426 |
|
|
404 |
427 |
for item in item_list: |
for item in item_list: |
|
... |
... |
def search_child(item_list, parent): |
407 |
430 |
|
|
408 |
431 |
return child_dict |
return child_dict |
409 |
432 |
|
|
410 |
|
def checkFlatfile(flatfile):
    """Check that the flatfile is a usable ';'-separated amazon flatfile.

    Parameter:
        flatfile [dict] : {'path': .., 'encoding': ..} of the input file

    Return:
        [bool] : True when the header line looks valid, False otherwise
                 (falls through to None when the file cannot be read)
    """
    try:
        with open(flatfile['path'], mode='r', encoding=flatfile['encoding']) as item:
            reader = csv.DictReader(item, delimiter=';')

            first_row = [* list(reader)[0] ]
            # A single parsed column strongly suggests the wrong delimiter.
            if(len(first_row) == 1):
                errorPrint("Wrong delimiter, use ';'",
                           'False delimiter detected',
                           inspect.currentframe().f_back.f_lineno)
                return False
            if(not( 'feed_product_type' in first_row )):
                # 'Marke' belongs to the first of amazon's 3 header rows,
                # so the user most likely kept the extra header lines.
                # (Previously these branches passed an unbound `err` and a
                # tracebackless sys.exc_info() here, which raised NameError
                # and masked the real diagnostic.)
                if( 'Marke' in first_row ):
                    errorPrint("Only use the last of the 3 header lines",
                               'Wrong header line detected',
                               inspect.currentframe().f_back.f_lineno)
                    print("Please cut the first two rows from the flatfile for this script\n")
                    return False
                else:
                    errorPrint("Wrong header line",
                               ','.join(first_row),
                               inspect.currentframe().f_back.f_lineno)
                    return False
            else:
                return True
    except Exception as err:
        warnPrint("Flatfile check failed", err,
                  sys.exc_info()[2].tb_lineno)
427 |
459 |
|
|
428 |
|
def checkEncoding(file_dict):
    """Detect the character encoding of a file with chardet.

    Parameter:
        file_dict [dict] : {'path': ..} of the file to probe

    Return:
        [dict] : the same dict with an 'encoding' key added on success
                 (returned unchanged when the file cannot be read)
    """
    try:
        with open(file_dict['path'], mode='rb') as item:
            try:
                raw_data = item.read()
            except Exception as err:
                print("ERROR: {0}\n".format(err))
                errorPrint("check Encoding reading failed", err,
                           sys.exc_info()[2].tb_lineno)
                # Without the raw bytes there is nothing to detect;
                # previously execution fell through to an unbound
                # `raw_data` and raised a masked NameError.
                return file_dict
            # Detect only once: chardet scans the whole buffer each call.
            detection = chardet.detect(raw_data)
            file_dict['encoding'] = detection['encoding']
            print("chardet data for {0}\n{1}\n".format(file_dict['path'], detection))

    except Exception as err:
        errorPrint("check Encoding failed", err,
                   sys.exc_info()[2].tb_lineno)

    return file_dict
442 |
477 |
|
|
443 |
|
def get_variationid(exportfile, sku): |
|
|
478 |
|
def getVariationId(exportfile, sku): |
444 |
479 |
|
|
445 |
480 |
variationid = 0 |
variationid = 0 |
446 |
481 |
with open(exportfile['path'], mode = 'r', encoding = exportfile['encoding']) as item: |
with open(exportfile['path'], mode = 'r', encoding = exportfile['encoding']) as item: |
|
... |
... |
def get_variationid(exportfile, sku): |
459 |
494 |
print("found ID in {0} value: {1}".format([*row][i], row[ [*row][i] ])) |
print("found ID in {0} value: {1}".format([*row][i], row[ [*row][i] ])) |
460 |
495 |
variationid = row[ [*row][i] ] |
variationid = row[ [*row][i] ] |
461 |
496 |
except Exception as err: |
except Exception as err: |
462 |
|
print("ERROR @ alternative header reading method in get_variationid: line: {0}, error: {1}" |
|
463 |
|
.format(sys.exc_info()[2].tb_lineno, err)) |
|
|
497 |
|
errorPrint("Looking for irregularities in getVariationId", |
|
498 |
|
err,sys.exc_info()[2].tb_lineno) |
464 |
499 |
print("press ENTER to continue...") |
print("press ENTER to continue...") |
465 |
500 |
input() |
input() |
466 |
501 |
if(not(variationid)): |
if(not(variationid)): |
467 |
502 |
print("No Variation ID found for {0}\n".format(sku)) |
print("No Variation ID found for {0}\n".format(sku)) |
|
503 |
|
warnPrint("No Variation ID found for "+sku, |
|
504 |
|
err,sys.exc_info()[2].tb_lineno) |
468 |
505 |
|
|
469 |
506 |
return variationid |
return variationid |
470 |
507 |
|
|