initBasti / Amazon2PlentySync (public) (License: GPLv3) (since 2019-01-27) (hash sha1)
Transfer your data from your Amazon Flatfile spreadsheet over to the Plentymarkets system. A how-to guide is included in the readme.
List of commits:
Subject Hash Author Date (UTC)
improved error handling & adjusted names to naming convention cfcd91090a2598c6c51576bcdd53e03ab6c2f59b Sebastian Fricke 2020-01-15 14:47:42
Refactor CategoryChooser 562e6657c6fef89d0584731e54325cec013268a7 Sebastian Fricke 2020-01-15 14:42:11
Add category_config location to the script config 8698e4a99d63b06fde5c39787fc7d6f7400b9f47 Sebastian Fricke 2020-01-15 14:29:47
Refactor findConfig 321ae9d7edd69e8be0755cf5ba82289944d06ca3 Sebastian Fricke 2020-01-15 14:26:09
Add logging function: no category config warning e8323843a3b6c24ef274d6a12c10d76aa8b8f591 Sebastian Fricke 2020-01-14 14:38:39
Add module + test for the category-id config fadaf4515aab1009f4df4a1af5a2e8f82077bc4c Sebastian Fricke 2020-01-14 14:35:44
improved coding style on log functions caf97eec6c6026aa051bc98f02a90e649a6e4754 Sebastian Fricke 2020-01-14 10:23:17
fixed a typo in the product type list 707e993b953aea0d653ffbca136bbe81bb36ea13 Sebastian Fricke 2020-01-14 10:22:34
added home product properties, improved dictionary iteration, fixed a key error in get_attributes 30d4aed4403c39a6865e30c0384c3360d375cbb6 Sebastian Fricke 2020-01-14 10:21:56
removed warning for missing flatfile columns as requested bfe6e22f7acb282a3af8423c81ceacf9fcf21ef4 Sebastian Fricke 2020-01-13 15:05:27
added initialization for the position variable 8331f92d62deb9ba7be7e97201c7c6afa7cf732a Sebastian Fricke 2020-01-13 14:47:57
improved code style and fixed problem where the dictionary containing the path was given to functions instead of the path itself 1a5bf99751b599f48d4687a9a6cbd55ffe213f5a Sebastian Fricke 2020-01-13 14:47:13
removed Barcode missing warning on parents b592779c6cc1588e2ae40394cab53d0d047746e7 Sebastian Fricke 2020-01-13 14:46:16
Added support for the amazon product types furnitureanddecor, bedandbath, swimwear b56708e55c3283a6cc2d3803b2abbb99bb125928 Sebastian Fricke 2020-01-13 14:16:40
fix failing attribute sync 87ea4bce17eba6c9c9842eaf9eb26249bf8d7da5 Sebastian Fricke 2020-01-13 12:15:35
new config handling d65bdfae89eceab6b1319d01373cf70ac7d8b63e Sebastian Fricke 2019-11-13 08:57:14
Fixed a problem, that caused Data to not pass sorting; Fixed error handling with the product type; Updated category ids 9a62d369fb24bc80765cd19e31fb255398fb8ed5 Sebastian Fricke 2019-09-12 09:27:54
fixed a merge conflict bug e6b4d9613237009d980cdbfc7ec65c3383a3495a Sebastian Fricke 2019-08-16 11:31:02
current status 15.8 94db3a5c98c596b24f00624fa4b772b9fd830b03 Sebastian Fricke 2019-08-15 14:26:42
Added manual file choosing in case of empty config 2df178528d70be15bfb2e1c9058f69e128236622 Sebastian Fricke 2019-08-15 10:11:41
Commit cfcd91090a2598c6c51576bcdd53e03ab6c2f59b - improved error handling & adjusted names to naming convention
Author: Sebastian Fricke
Author date (UTC): 2020-01-15 14:47
Committer name: Sebastian Fricke
Committer date (UTC): 2020-01-15 14:47
Parent(s): 562e6657c6fef89d0584731e54325cec013268a7
Signer:
Signing key:
Signing status: N
Tree: f0f303c3a7a1e8a245631b97598ea570ec75d493
File Lines added Lines deleted
packages/item_upload.py 79 42
File packages/item_upload.py changed (mode: 100644) (index 730dbcf..a916f3e)
... ... import sys
3 3 import re import re
4 4 import chardet import chardet
5 5 import collections import collections
6 import colorama
7 import inspect
6 8 from sys import exit from sys import exit
7 9 from packages import barcode, amazon_data_upload, price_upload from packages import barcode, amazon_data_upload, price_upload
8 10
 
... ... from packages import barcode, amazon_data_upload, price_upload
10 12 class WrongEncodingException(Exception): class WrongEncodingException(Exception):
11 13 pass pass
12 14
15 try:
16 def errorPrint(msg, err, linenumber):
17 print(colorama.Fore.RED)
18 print("ERROR:\nline:{0}\t{1}\tError:{2}".format(linenumber, msg, err))
19 print(colorama.Style.RESET_ALL)
20 except AttributeError:
21 def errorPrint(msg, err, linenumber):
22 print("ERROR:\nline:{0}\t{1}\tError:{2}".format(linenumber, msg, err))
23
24 try:
25 def warnPrint(msg, err, linenumber):
26 print(colorama.Fore.YELLOW)
27 print("WARNING:\nline:{0}\t{1}\tWarning:{2}"
28 .format(linenumber, msg, err))
29 print(colorama.Style.RESET_ALL)
30 except AttributeError:
31 def warnPrint(msg, err, linenumber):
32 print("WARNING:\nline:{0}\t{1}\tWarning:{2}"
33 .format(linenumber, msg, err))
13 34
14 35 def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, filename): def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, filename):
15 36 # The column headers for the output file as expected from the # The column headers for the output file as expected from the
 
... ... def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f
53 74 # Get sets of all colors and sizes for each parent # Get sets of all colors and sizes for each parent
54 75 # to find if there are some with only one attribute value for all childs # to find if there are some with only one attribute value for all childs
55 76 color_size_sets = {} color_size_sets = {}
56 color_size_sets = find_similar_attr(flatfile)
77 color_size_sets = findSimilarAttr(flatfile)
57 78
58 79 # PACKAGE PROPERTIES # PACKAGE PROPERTIES
59 package_properties = get_properties(flatfile)
80 package_properties = getProperties(flatfile)
60 81 group_parent = '' group_parent = ''
61 82
62 83 try: try:
 
... ... def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f
88 109 position = 0 position = 0
89 110 if(row['parent_child'] == 'child'): if(row['parent_child'] == 'child'):
90 111 isParent = False isParent = False
91 attributes = get_attributes(dataset=row,
112 attributes = getAttributes(dataset=row,
92 113 sets=color_size_sets) sets=color_size_sets)
93 114 if(group_parent and row['parent_sku'] == group_parent): if(group_parent and row['parent_sku'] == group_parent):
94 115 position += 1 position += 1
95 116 except Exception as err: except Exception as err:
96 print("Error @ attribute setting, line:{0}, err:{1}"
97 .format(sys.exc_info()[2].tb_lineno, err))
117 warnPrint("Attribute setting failed", err,
118 sys.exc_info()[2].tb_lineno)
98 119 try: try:
99 120 values = [ values = [
100 121 row['parent_sku'], row['item_sku'], row['parent_sku'], row['item_sku'],
 
... ... def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f
133 154 .format(sys.exc_info()[2].tb_lineno, err)) .format(sys.exc_info()[2].tb_lineno, err))
134 155 Data[row['item_sku']] = collections.OrderedDict(zip(column_names, values)) Data[row['item_sku']] = collections.OrderedDict(zip(column_names, values))
135 156 except KeyError as err: except KeyError as err:
136 print("Error reading file\nline:{0}err:{1}"
137 .format(sys.exc_info()[2].tb_lineno, err))
157 errorPrint("Reading file failed", err,
158 sys.exc_info()[2].tb_lineno)
138 159 return row['item_sku'] return row['item_sku']
139 160
140 161 # open the intern number csv to get the item ID # open the intern number csv to get the item ID
 
... ... def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f
160 181 Data[row]['ASIN-type'] = barcode_data[row]['ASIN-type'] Data[row]['ASIN-type'] = barcode_data[row]['ASIN-type']
161 182 Data[row]['ASIN-value'] = barcode_data[row]['ASIN-value'] Data[row]['ASIN-value'] = barcode_data[row]['ASIN-value']
162 183 except Exception as err: except Exception as err:
163 print("ERROR @ Barcode Part for {0}.\n{1}.\n".format(row, err))
184 errorPrint("Barcode part for "+row, err,
185 sys.exc_info()[2].tb_lineno)
164 186
165 187 # Include the amazonsku # Include the amazonsku
166 188 sku_data = amazon_data_upload.amazonSkuUpload(flatfile) sku_data = amazon_data_upload.amazonSkuUpload(flatfile)
 
... ... def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f
173 195 Data[row]['amazon_sku'] = sku_data[row]['SKU'] Data[row]['amazon_sku'] = sku_data[row]['SKU']
174 196 Data[row]['amazon_parentsku'] = sku_data[row]['ParentSKU'] Data[row]['amazon_parentsku'] = sku_data[row]['ParentSKU']
175 197 except Exception as err: except Exception as err:
176 print("ERROR @ SKU Part for {0}.\n{1}.\n".format(row, err))
198 errorPrint("SKU part for "+row, err,
199 sys.exc_info()[2].tb_lineno)
177 200
178 201 # Include the amazonsku # Include the amazonsku
179 202 ama_data = amazon_data_upload.amazonDataUpload(flatfile) ama_data = amazon_data_upload.amazonDataUpload(flatfile)
 
... ... def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f
185 208 Data[row]['fba-enabled'] = ama_data[row]['ItemAmazonFBA'] Data[row]['fba-enabled'] = ama_data[row]['ItemAmazonFBA']
186 209 Data[row]['fba-shipping'] = ama_data[row]['ItemShippingWithAmazonFBA'] Data[row]['fba-shipping'] = ama_data[row]['ItemShippingWithAmazonFBA']
187 210 except Exception as err: except Exception as err:
188 print("ERROR @ AmazonData Part for {0}.\n{1}.\n".format(row, err))
211 errorPrint("AmazonData part for "+row, err,
212 sys.exc_info()[2].tb_lineno)
189 213
190 214 # Include the price # Include the price
191 215 price_data = price_upload.priceUpload(flatfile) price_data = price_upload.priceUpload(flatfile)
 
... ... def itemUpload(flatfile, intern, stocklist, attributefile, folder, input_data, f
199 223 Data[row]['webshop-price'] = price_data[row]['webshop'] Data[row]['webshop-price'] = price_data[row]['webshop']
200 224 Data[row]['etsy-price'] = price_data[row]['etsy'] Data[row]['etsy-price'] = price_data[row]['etsy']
201 225 except Exception as err: except Exception as err:
202 print("ERROR @ Price Part for {0}.\n{1}.\n".format(row, err))
226 errorPrint("Price part for "+row, err,
227 sys.exc_info()[2].tb_lineno)
203 228
204 229 # Write Data into new CSV for Upload # Write Data into new CSV for Upload
205 230 # OUTPUT # OUTPUT
206 231 # -------------------------------------------------------------- # --------------------------------------------------------------
207 232
208 233 # Sort the dictionary to make sure that the parents are the first variant of each item # Sort the dictionary to make sure that the parents are the first variant of each item
209 sorted_Data = sort_Products(Data)
234 sorted_Data = sortProducts(Data)
210 235
211 236 barcode.writeCSV(sorted_Data, "item", column_names, folder, filename) barcode.writeCSV(sorted_Data, "item", column_names, folder, filename)
212 237 except UnicodeDecodeError as err: except UnicodeDecodeError as err:
213 238 print("Decode Error at line: {0}, err: {1}".format(sys.exc_info()[2].tb_lineno, err)) print("Decode Error at line: {0}, err: {1}".format(sys.exc_info()[2].tb_lineno, err))
239 errorPrint("decoding problem", err,
240 sys.exc_info()[2].tb_lineno)
214 241 print("press ENTER to continue..") print("press ENTER to continue..")
215 242 input() input()
216 243 sys.exit() sys.exit()
 
... ... def itemPropertyUpload(flatfile, folder, filename):
260 287 use_names = [i for i in property_names if i in [*row]] use_names = [i for i in property_names if i in [*row]]
261 288 values = [row[i] for i in use_names] values = [row[i] for i in use_names]
262 289 except ValueError as err: except ValueError as err:
263 print("In property Upload: One of the values wasn't found : ", err)
290 warnPrint("No Value for "+i, err,
291 sys.exc_info()[2].tb_lineno)
264 292
265 293 # Check for empty values # Check for empty values
266 294 properties[row['item_sku']] = dict(zip(use_names, values)) properties[row['item_sku']] = dict(zip(use_names, values))
 
... ... def itemPropertyUpload(flatfile, folder, filename):
275 303
276 304 Data[row + prop] = dict(zip(column_names, values)) Data[row + prop] = dict(zip(column_names, values))
277 305 except KeyError as kerr: except KeyError as kerr:
278 print("ERROR: Key {0} was not found in the flatfile"
279 .format(kerr))
306 errorPrint("Key was not found in the flatfile", kerr,
307 sys.exc_info()[2].tb_lineno)
280 308
281 309
282 310 barcode.writeCSV(Data, "Item_Merkmale", column_names, folder, filename) barcode.writeCSV(Data, "Item_Merkmale", column_names, folder, filename)
283 311
284 def get_properties(flatfile):
312 def getProperties(flatfile):
285 313
286 314 properties = {'length':0, properties = {'length':0,
287 315 'width':0, 'width':0,
 
... ... def get_properties(flatfile):
316 344 properties[ 'width' ] = int(float(row['package_width'])) properties[ 'width' ] = int(float(row['package_width']))
317 345 properties[ 'weight' ] = int(float(row['package_weight'])) properties[ 'weight' ] = int(float(row['package_weight']))
318 346 except Exception as err: except Exception as err:
319 print("Error @ setting values: line:{0}, err:{1}"
320 .format(sys.exc_info()[2].tb_lineno, err))
347 errorPrint("getProperties setting values failed", err,
348 sys.exc_info()[2].tb_lineno)
321 349
322 350 except ValueError as err: except ValueError as err:
323 print(err)
324 print("/nPlease copy the values for height, length, width",
325 "and weight\nfrom the children to the parent",
326 "variation in the flatfile.\n")
351 errorPrint("Parent has no package measurements", err,
352 sys.exc_info()[2].tb_lineno)
327 353 exit() exit()
328 except Exception as err:
329 print("Error @ setting values: line:{0}, err:{1}"
330 .format(sys.exc_info()[2].tb_lineno, err))
331 354
332 355 return properties return properties
333 356
334 def get_attributes(dataset, sets):
357 def getAttributes(dataset, sets):
335 358
336 359 output_string = '' output_string = ''
337 360 try: try:
 
... ... def get_attributes(dataset, sets):
343 366 dataset['parent_sku'], ','.join([*sets]) dataset['parent_sku'], ','.join([*sets])
344 367 )) ))
345 368 except Exception as err: except Exception as err:
346 print("Error @ adding color to string (get_attributes)\nerr:{0}"
347 .format(err))
369 errorPrint("Adding of color attribute failed", err,
370 sys.exc_info()[2].tb_lineno)
348 371 try: try:
349 372 if(len(sets[dataset['parent_sku']]['size']) > 1): if(len(sets[dataset['parent_sku']]['size']) > 1):
350 373 if(not(output_string)): if(not(output_string)):
 
... ... def get_attributes(dataset, sets):
352 375 else: else:
353 376 output_string = output_string + ';size_name:' + dataset['size_name'] output_string = output_string + ';size_name:' + dataset['size_name']
354 377 except Exception as err: except Exception as err:
355 print("Error @ adding size to string\nerr:{0}"
356 .format(err))
378 errorPrint("Adding of size attribute failed", err,
379 sys.exc_info()[2].tb_lineno)
357 380 return output_string return output_string
358 381
359 def find_similar_attr(flatfile):
382 def findSimilarAttr(flatfile):
360 383
361 384 Data = {} Data = {}
362 385
 
... ... def find_similar_attr(flatfile):
377 400 Data[row['parent_sku']]['size'].add(row['size_name']) Data[row['parent_sku']]['size'].add(row['size_name'])
378 401 return Data return Data
379 402
380 def sort_Products(dataset):
403 def sortProducts(dataset):
381 404 item_list = dataset.items() item_list = dataset.items()
382 405 new_dict = collections.OrderedDict() new_dict = collections.OrderedDict()
383 406 parent_dict = collections.OrderedDict() parent_dict = collections.OrderedDict()
 
... ... def sort_Products(dataset):
391 414 # add the parent to the new dict # add the parent to the new dict
392 415 new_dict[item[0]] = item[1] new_dict[item[0]] = item[1]
393 416 # get all the children and update the itemlist without them # get all the children and update the itemlist without them
394 child_dict = search_child(item_list=item_list, parent=item[0])
417 child_dict = searchChild(item_list=item_list, parent=item[0])
395 418 # add each child to the new dict after the parent # add each child to the new dict after the parent
396 419 for child in child_dict: for child in child_dict:
397 420 new_dict[child] = child_dict[child] new_dict[child] = child_dict[child]
398 421
399 422 return new_dict return new_dict
400 423
401 def search_child(item_list, parent):
424 def searchChild(item_list, parent):
402 425 child_dict = collections.OrderedDict() child_dict = collections.OrderedDict()
403 426
404 427 for item in item_list: for item in item_list:
 
... ... def search_child(item_list, parent):
407 430
408 431 return child_dict return child_dict
409 432
410 def check_flatfile(flatfile):
433 def checkFlatfile(flatfile):
411 434 try: try:
412 435 with open(flatfile['path'], mode='r', encoding=flatfile['encoding']) as item: with open(flatfile['path'], mode='r', encoding=flatfile['encoding']) as item:
413 436 reader = csv.DictReader(item, delimiter=';') reader = csv.DictReader(item, delimiter=';')
414 437
415 438 first_row = [* list(reader)[0] ] first_row = [* list(reader)[0] ]
439 if(len(first_row) == 1):
440 errorPrint("Wrong delimiter, use ';'",
441 'False delimiter detected',
442 inspect.currentframe().f_back.f_lineno)
443 return False
416 444 if(not( 'feed_product_type' in first_row )): if(not( 'feed_product_type' in first_row )):
417 445 if( 'Marke' in first_row ): if( 'Marke' in first_row ):
446 errorPrint("Only use the last of the 3 header lines", err,
447 sys.exc_info()[2].tb_lineno)
418 448 print("Please cut the first two rows from the flatfile for this script\n") print("Please cut the first two rows from the flatfile for this script\n")
419 449 return False return False
420 450 else: else:
421 print("This file contains the wrong column header\n{0}\n".format(','.join(first_row)))
451 errorPrint("Wrong header line", err,
452 sys.exc_info()[2].tb_lineno)
422 453 return False return False
423 454 else: else:
424 455 return True return True
425 456 except Exception as err: except Exception as err:
426 print("ERROR @ flatfile checking : {0}".format(err))
457 warnPrint("Flatfile check failed", err,
458 sys.exc_info()[2].tb_lineno)
427 459
428 def check_encoding(file_dict):
460 def checkEncoding(file_dict):
429 461 try: try:
430 462 with open(file_dict['path'], mode='rb') as item: with open(file_dict['path'], mode='rb') as item:
431 463 try: try:
432 464 raw_data = item.read() raw_data = item.read()
433 465 except Exception as err: except Exception as err:
434 466 print("ERROR: {0}\n".format(err)) print("ERROR: {0}\n".format(err))
467 errorPrint("check Encoding reading failed", err,
468 sys.exc_info()[2].tb_lineno)
435 469 file_dict['encoding'] = chardet.detect(raw_data)['encoding'] file_dict['encoding'] = chardet.detect(raw_data)['encoding']
436 470 print("chardet data for {0}\n{1}\n".format(file_dict['path'], chardet.detect(raw_data))) print("chardet data for {0}\n{1}\n".format(file_dict['path'], chardet.detect(raw_data)))
437 471
438 472 except Exception as err: except Exception as err:
439 print("Error : {0}\n".format(err))
473 errorPrint("check Encoding failed", err,
474 sys.exc_info()[2].tb_lineno)
440 475
441 476 return file_dict return file_dict
442 477
443 def get_variationid(exportfile, sku):
478 def getVariationId(exportfile, sku):
444 479
445 480 variationid = 0 variationid = 0
446 481 with open(exportfile['path'], mode = 'r', encoding = exportfile['encoding']) as item: with open(exportfile['path'], mode = 'r', encoding = exportfile['encoding']) as item:
 
... ... def get_variationid(exportfile, sku):
459 494 print("found ID in {0} value: {1}".format([*row][i], row[ [*row][i] ])) print("found ID in {0} value: {1}".format([*row][i], row[ [*row][i] ]))
460 495 variationid = row[ [*row][i] ] variationid = row[ [*row][i] ]
461 496 except Exception as err: except Exception as err:
462 print("ERROR @ alternative header reading method in get_variationid: line: {0}, error: {1}"
463 .format(sys.exc_info()[2].tb_lineno, err))
497 errorPrint("Looking for irregularities in getVariationId",
498 err,sys.exc_info()[2].tb_lineno)
464 499 print("press ENTER to continue...") print("press ENTER to continue...")
465 500 input() input()
466 501 if(not(variationid)): if(not(variationid)):
467 502 print("No Variation ID found for {0}\n".format(sku)) print("No Variation ID found for {0}\n".format(sku))
503 warnPrint("No Variation ID found for "+sku,
504 err,sys.exc_info()[2].tb_lineno)
468 505
469 506 return variationid return variationid
470 507
Hints:
Before your first commit, do not forget to set up your git environment:
git config --global user.name "your_name_here"
git config --global user.email "your@email_here"

Clone this repository using HTTP(S):
git clone https://rocketgit.com/user/initBasti/Amazon2PlentySync

Clone this repository using ssh (do not forget to upload a key first):
git clone ssh://rocketgit@ssh.rocketgit.com/user/initBasti/Amazon2PlentySync

Clone this repository using git:
git clone git://git.rocketgit.com/user/initBasti/Amazon2PlentySync

You are allowed to anonymously push to this repository.
This means that your pushed commits will automatically be transformed into a merge request:
... clone the repository ...
... make some changes and some commits ...
git push origin main