First commit
commit a2663d15be

69  .drone.yml  Normal file
@@ -0,0 +1,69 @@
kind: pipeline
type: docker
name: default

steps:
  - name: clean environment
    image: appleboy/drone-ssh
    settings:
      host:
        from_secret: host
      user:
        from_secret: user
      key:
        from_secret: git_usr_rsa_key
      port:
        from_secret: port
      script:
        - cd /home/administrador/applications/hive_sales_sync/app || exit
        - echo > dummy.txt || exit
        - rm -rf *
  - name: copy conf file
    image: appleboy/drone-ssh
    depends_on:
      - clean environment
    settings:
      host:
        from_secret: host
      user:
        from_secret: user
      key:
        from_secret: git_usr_rsa_key
      port:
        from_secret: port
      script:
        - cd /home/administrador/applications/hive_sales_sync || exit
        - cp conf.json app || exit
  - name: copy files
    image: appleboy/drone-scp
    depends_on:
      - clean environment
    settings:
      host:
        from_secret: host
      user:
        from_secret: user
      key:
        from_secret: git_usr_rsa_key
      port:
        from_secret: port
      command_timeout: 2m
      target: /home/administrador/applications/hive_sales_sync/app
      source: ./
  - name: rm git
    image: appleboy/drone-ssh
    depends_on:
      - copy files
    settings:
      host:
        from_secret: host
      user:
        from_secret: user
      key:
        from_secret: git_usr_rsa_key
      port:
        from_secret: port
      script:
        - cd /home/administrador/applications/hive_sales_sync/app || exit
        - rm -rf .git
        - hostname

240  .gitignore  vendored  Normal file
@@ -0,0 +1,240 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839

# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf

# AWS User-specific
.idea/**/aws.xml

# Generated files
.idea/**/contentModel.xml

# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml

# Gradle
.idea/**/gradle.xml
.idea/**/libraries

# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr

# CMake
cmake-build-*/

# Mongo Explorer plugin
.idea/**/mongoSettings.xml

# File-based project format
*.iws

# IntelliJ
out/

# mpeltonen/sbt-idea plugin
.idea_modules/

# JIRA plugin
atlassian-ide-plugin.xml

# Cursive Clojure plugin
.idea/replstate.xml

# SonarLint plugin
.idea/sonarlint/

# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties

# Editor-based Rest Client
.idea/httpRequests

# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser

conf.json

35  README.md  Normal file
@@ -0,0 +1,35 @@
# Domino's SV interface

This interface will sync sales to HIVE according to the given parameters.

```sh
# sync sales for a specific branch for today
python3 dosv.py --branch=14759

# sync sales for a specific branch and date
python3 dosv.py --branch=14759 --start-date=20230701

# sync (specific) sales for a specific branch and date
# all is the default behaviour, so this command is equivalent to the one above
python3 dosv.py --branch=14759 --start-date=20230701 --orders=all

# specifying sales
python3 dosv.py --branch=14759 --start-date=20230701 --orders=54678,673342

# ALL THE COMMANDS ABOVE WILL SEARCH THE ORDERS (ALL OR SPECIFIC) IN THE
# POS.DBO.ORDERS TABLE AND WILL ATTEMPT TO INSERT THEM INTO HIVE_SALES.
# IF THE ORDERS ALREADY EXIST IN HIVE_SALES, THEY WILL NOT BE INSERTED
# AND THEIR STATUS WILL REMAIN AS IT IS. TO OVERRIDE THIS BEHAVIOUR
# YOU MUST PASS THE FLAG --reprocess=true, WHICH WILL UPDATE THE SALES TO STATUS=0
# IN HIVE_SALES.

# reprocess all branches for today
python3 dosv.py --reprocess=true

# reprocess a specific branch for a given date
python3 dosv.py --branch=14759 --start-date=20230701 --reprocess=true

# reprocess a specific branch for a given date and orders
python3 dosv.py --branch=14759 --start-date=20230701 --orders=54678,673342 --reprocess=true

```

125  docr.py  Normal file
@@ -0,0 +1,125 @@
import argparse
import json
import logging
import pyodbc
import sys
from datetime import date, datetime
from models.docr.sale import SaleModel
from pathlib import Path
from utils.timezone import set_current

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
console_handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)


def read_sql(file_name: str) -> str:
    with open(Path(__file__).resolve().parent / 'sql' / 'docr' / file_name) as sqlf:
        return sqlf.read()


def main(args, conf):
    for database in conf['databases']:
        logger.debug(f"attempt to process branch {database['code']} {database['branch']}")
        if args.branch != 'all' and args.branch != database['code']:
            logger.debug(
                f"branch {database['code']} {database['branch']} is not the one specified and not all was specified"
            )
            continue

        try:
            threads = []
            # Fetch SQL sentences
            prepare_sales = read_sql(
                'new_sales.sql' if args.orders == 'all' else 'new_sales_specific.sql'
            )
            reprocess_sales = read_sql(
                'reprocess_sales.sql' if args.orders == 'all' else 'reprocess_sales_specific.sql'
            )
            process_sales = read_sql('queue_table.sql')

            with pyodbc.connect(conf['dsn'] % database) as conn:
                with conn.cursor() as cur:
                    # Insert Sales into Table
                    if args.orders == 'all':
                        logger.debug(f"all sales with date {args.start_date}")
                        cur.execute(prepare_sales, args.start_date)
                    else:
                        logger.debug(f"specific sales with date {args.start_date} sales {args.orders}")
                        cur.execute(prepare_sales, args.start_date, args.orders)
                    conn.commit()
                    logger.debug(f"Number of records inserted: {cur.rowcount}")

                    # Reprocess, set status code all or specific in hive_sales table
                    if args.reprocess:
                        if args.orders == 'all':
                            cur.execute(reprocess_sales, args.start_date)
                        else:
                            cur.execute(reprocess_sales, args.start_date, args.orders)
                        conn.commit()

                    # Fetch Sales
                    cur.execute(process_sales)
                    sales = cur.fetchall()
                    logger.debug(f"sales to process {len(sales)}")

                    # This loop is to execute in batches of conf['max_threads']
                    max_thread = int(conf['max_threads'])
                    for i in range(0, len(sales), max_thread):
                        ss = sales[i:i + max_thread]
                        for s in ss:
                            sale = SaleModel(s, conn=conf['dsn'] % database)
                            sale.endpoint_url = conf['url']
                            sale.token = conf['tokens']['DOCR']
                            sale.start()
                            threads.append(sale)

            # Wait for all threads to finish
            logger.debug("waiting for threads to finish")
            for thread in threads:
                thread.join()
        except Exception as e:
            print(
                f"{datetime.now()} error when processing branch {database['code']}-{database['branch']} => {e}",
                file=sys.stderr
            )


if __name__ == '__main__':
    parser = argparse.ArgumentParser(prog='Sales SYNC Script')
    parser.add_argument(
        '--start-date',
        help='date for searching',
        default=date.today().strftime('%Y%m%d'),
        type=lambda s: datetime.strptime(s, '%Y%m%d')
    )
    parser.add_argument(
        '--branch',
        help='branch for loading',
        default='all',
    )
    parser.add_argument(
        '--orders',
        help='which orders are trying to fetch',
        default='all',
        type=str,
    )
    parser.add_argument(
        '--reprocess',
        help='flag which indicates if (all or specific) sale(s) will be reprocessed',
        default=False,
        type=bool,
    )
    sys_args = parser.parse_args(sys.argv[1:])

    with open(Path(__file__).resolve().parent / 'conf.json') as f:
        configuration = json.load(f)

    set_current(configuration['timezone'])

    logger.debug(f"starting process {datetime.now()}")
    main(sys_args, configuration)
    logger.debug(f"finished process {datetime.now()}")
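
The commit deliberately leaves conf.json out of version control (it is in .gitignore and is copied onto the server by the "copy conf file" pipeline step), so here is a minimal sketch, not part of the commit, of the shape docr.py expects. Only the keys actually read by the code (`timezone`, `dsn`, `max_threads`, `url`, `tokens['DOCR']`, and `databases[*]['code']` / `['branch']`) come from the script; every concrete value, and the extra `server`/`user`/`password` placeholders referenced by the `dsn` template, are made-up assumptions for illustration.

```python
import json

# Hypothetical conf.json contents; all values below are placeholders, not real credentials.
example_conf = {
    "timezone": "America/Santo_Domingo",          # assumption: any IANA zone name accepted by utils.timezone
    "max_threads": 4,                             # batch size used when starting SaleModel threads
    "url": "https://hive.example.com/api/sales",  # assumption: HIVE endpoint the serialized sales are POSTed to
    "tokens": {"DOCR": "Bearer <token>"},         # sent as the Authorization header by Model.perform_request
    # conf['dsn'] is formatted with `% database`, so its %(...)s placeholders
    # must be keys of each entry in "databases"
    "dsn": "DRIVER={ODBC Driver 17 for SQL Server};SERVER=%(server)s;DATABASE=POS;UID=%(user)s;PWD=%(password)s",
    "databases": [
        {"code": "14759", "branch": "Example branch", "server": "10.0.0.10", "user": "sync", "password": "secret"}
    ],
}

with open("conf.json", "w") as f:
    json.dump(example_conf, f, indent=2)
```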

0  models/__init__.py  Normal file

0  models/docr/__init__.py  Normal file

46  models/docr/coupon.py  Normal file
@@ -0,0 +1,46 @@
from orm import fields, model


class SaleCouponModel(model.Model):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.instance_id = fields.ThisModelValue(db_column='OrdCpnNbr')
        self.code = fields.ThisModelValue(db_column='CouponCode')
        self.discounts = fields.ThisModelDecimalValue(
            db_column='OrdCpnCouponDiscountAmt * -1', db_alias='discounts', decimal_places=4
        )
        self.description = fields.One2OneValue(
            db_column='CouponDescText',
            db_table='pos.dbo.Coupons2',
            join=(
                ('Location_Code', 'Location_Code'),
                ('CouponCode', 'CouponCode'),
            )
        )

    class Conf:
        db_table = 'pos.dbo.OrderCoupons'
        filters = ('Location_Code', 'Order_Date', 'Order_Number')


class ItemCouponModel(model.Model):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.instance_id = fields.ThisModelValue(db_column='OrdCpnNbr')
        self.code = fields.One2OneValue(
            db_column='CouponCode',
            db_table='pos.dbo.OrderCoupons',
            join=(
                ('Location_Code', 'Location_Code'),
                ('Order_Date', 'Order_Date'),
                ('Order_Number', 'Order_Number'),
                ('OrdCpnNbr', 'OrdCpnNbr')
            )
        )
        self.amount = fields.ThisModelDecimalValue(
            db_column='OrdLineCpnCouponDiscountAmt * -1', db_alias='amount', decimal_places=4
        )

    class Conf:
        db_table = 'pos.dbo.OrderLineCoupons'
        filters = ('Location_Code', 'Order_Date', 'Order_Number', 'Line_Number')

36  models/docr/items.py  Normal file
@@ -0,0 +1,36 @@
from .coupon import ItemCouponModel
from orm import fields, model


class ItemModel(model.Model):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.item_id = fields.FixedValue(value=1)
        self.item_type = fields.FixedValue(value='product')
        self.item_code = fields.ThisModelValue(db_column='ProductCode')
        self.description = fields.ThisModelValue(db_column='OrdLineDescription')
        self.quantity = fields.ThisModelValue(db_column='Quantity')
        self.cost = fields.ThisModelDecimalValue(
            db_column='IFC', decimal_places=4
        )
        self.unit_price = fields.ThisModelDecimalValue(
            db_column='Price/Quantity', db_alias='unit_price', decimal_places=4
        )
        self.net_price = fields.ThisModelDecimalValue(
            db_column='Price', decimal_places=4
        )
        self.total_price = fields.ThisModelDecimalValue(
            db_column='OrdLineFinalPrice', decimal_places=4
        )
        self.taxes = fields.ThisModelValueCustomSerializer(
            db_column='OrdLineTaxAmt',
            value_serialized=lambda v: [{"code": 1, "amount": round(float(v), 4)}]
        )
        self.discounts = ItemCouponModel(
            ['Location_Code', 'Order_Date', 'Order_Number', 'Line_Number'],
            raise_not_found_error=False
        )

    class Conf:
        db_table = 'POS.dbo.Order_Lines'
        filters = ('Location_Code', 'Order_Date', 'Order_Number')

24  models/docr/payment.py  Normal file
@@ -0,0 +1,24 @@
from orm import fields, model


class PaymentModel(model.Model):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.payment_type = fields.ThisModelValue(
            db_column='concat(Order_Pay_Type_Code, Credit_Card_ID)',
            db_alias='payment_type'
        )
        self.reference = fields.ThisModelValue(db_column='OrdPayEPayRefNumber')
        self.amount = fields.ThisModelDecimalValue(
            db_column='sum(OrdPayAmt)', db_alias='amount', decimal_places=4
        )

    class Conf:
        db_table = 'pos.dbo.OrderPayments2'
        filters = ('Location_Code', 'Order_Date', 'Order_Number')

    def get_where(self):
        where = super().get_where()
        return f'{where} ' \
               f'AND OrdPayStatusCode != 3 \n' \
               f'GROUP BY concat(Order_Pay_Type_Code, Credit_Card_ID), OrdPayEPayRefNumber'

66  models/docr/sale.py  Normal file
@@ -0,0 +1,66 @@
from .coupon import SaleCouponModel
from .items import ItemModel
from .payment import PaymentModel
from orm import fields, model


class SaleModel(model.Model):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.customer = fields.FixedValue(value=3475)
        self.vendor = fields.FixedValue(value=587)
        self.uuid = fields.ThisModelValue(db_column='OrderUUID')
        self.branch = fields.ThisModelValue(db_column='Location_Code')
        self.pos = fields.NullValue()
        self.order_num = fields.ThisModelValue(db_column='Order_Number')
        self.doc_type = fields.FixedValue(value=4)
        self.generated_dt = fields.ThisModelDateTimeValue(db_column='Added')
        self.shift_dt = fields.ThisModelDateTimeValue(db_column='Order_Date')
        self.cost = fields.ThisModelDecimalValue(
            db_column='OrderIdealFoodCost', decimal_places=4
        )
        self.net_amt = fields.ThisModelDecimalValue(
            db_column='SubTotal', decimal_places=4
        )
        self.tax_amt = fields.ThisModelDecimalValue(
            db_column='Sales_Tax1', decimal_places=4
        )
        self.total_amt = fields.ThisModelDecimalValue(
            db_column='OrderFinalPrice', decimal_places=4
        )
        self.discounts_amt = fields.ThisModelDecimalValue(
            db_column='OrderLineDiscountAmt', decimal_places=4
        )
        self.official_doc = fields.One2OneValue(
            db_column='ReceiptNumber',
            db_table='GovernmentReceipt.dbo.ReceiptHistory',
            join=(
                ('Order_Number', 'OrderNumber'),
                ('Order_Date', 'OrderDate'),
                ('Location_Code', 'LocationCode')
            ),
            raise_not_found_error=False
        )
        self.delivery_channel = fields.One2OneValue(
            db_column='Description',
            db_table='pos.dbo.ServiceMethods',
            join=(
                ('Location_Code', 'Location_Code'),
                ('Order_Type_Code', 'ServiceMethodCode')
            )
        )
        self.payments = PaymentModel(
            ['Location_Code', 'Order_Date', 'Order_Number']
        )
        self.items = ItemModel(
            ['Location_Code', 'Order_Date', 'Order_Number']
        )
        self.discounts = SaleCouponModel(
            ['Location_Code', 'Order_Date', 'Order_Number'],
            raise_not_found_error=False
        )

    class Conf:
        db_table = 'pos.dbo.Orders'
        filters = ('Location_Code', 'Order_Date', 'Order_Number')
        log_table = 'pos.dbo.hive_sales'

0  orm/__init__.py  Normal file

15  orm/fields/__init__.py  Normal file
@@ -0,0 +1,15 @@
from .common import FixedValue, NullValue, ThisModelValue, ThisModelValueCustomSerializer, \
    ThisModelDateTimeValue, ThisModelDecimalValue, ThisModelRawResultCustomSerializer
from .one2one import One2OneValue


__all__ = [
    'FixedValue',
    'NullValue',
    'ThisModelValue',
    'ThisModelValueCustomSerializer',
    'ThisModelRawResultCustomSerializer',
    'ThisModelDateTimeValue',
    'ThisModelDecimalValue',
    'One2OneValue'
]

17  orm/fields/base.py  Normal file
@@ -0,0 +1,17 @@
class AttributeGetter:
    mandatory = []

    # noinspection PyUnusedLocal
    def __init__(self, *args, **kwargs):
        if not hasattr(self, 'value'):
            self.value = None

        for m in self.mandatory:
            if not hasattr(self, m) or getattr(self, m) is None:
                raise ValueError(f'Value attribute must have {m} assigned')

    def get_value(self):
        return self.value

    def get_value_serialized(self):
        return self.value

70  orm/fields/common.py  Normal file
@@ -0,0 +1,70 @@
from .base import AttributeGetter
from utils import timezone


class FixedValue(AttributeGetter):
    mandatory = ['value']

    def __init__(self, value=None, *args, **kwargs):
        self.value = value
        super().__init__(*args, **kwargs)


class NullValue(AttributeGetter):
    pass


class ThisModelValue(AttributeGetter):
    mandatory = ['db_column']

    def __init__(self, db_column=None, db_alias=None, *args, **kwargs):
        self.db_column = db_column
        self.db_alias = db_column if not db_alias else db_alias
        super().__init__(*args, **kwargs)

    def get_column(self):
        if self.db_column != self.db_alias:
            return f'{self.db_column} AS {self.db_alias}'
        return self.db_column


class ThisModelDateTimeValue(ThisModelValue):
    def get_value_serialized(self):
        return timezone.make_aware(self.value).isoformat()


class ThisModelDecimalValue(ThisModelValue):
    mandatory = ['db_column', 'decimal_places']

    def __init__(self, db_column=None, decimal_places=None, *args, **kwargs):
        self.db_column = db_column
        self.decimal_places = decimal_places
        super().__init__(db_column=db_column, *args, **kwargs)

    def get_value_serialized(self):
        return round(float(self.value or 0), self.decimal_places)


class ThisModelValueCustomSerializer(ThisModelValue):
    mandatory = ['db_column', 'value_serialized']

    def __init__(self, db_column=None, value_serialized=None, *args, **kwargs):
        self.db_column = db_column
        self.value_serialized = value_serialized
        super().__init__(db_column=db_column, *args, **kwargs)

    def get_value_serialized(self):
        return self.value_serialized(self.value)


class ThisModelRawResultCustomSerializer(ThisModelValue):
    mandatory = ['db_column', 'value_serialized']

    def __init__(self, db_column=None, value_serialized=None, *args, **kwargs):
        self.raw_result = None
        self.db_column = db_column
        self.value_serialized = value_serialized
        super().__init__(db_column=db_column, *args, **kwargs)

    def get_value_serialized(self):
        return self.value_serialized(self.raw_result)
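
The field classes above only hold a value and know how to turn it into something JSON-friendly; a quick sketch (not part of the commit, assuming the repository root is on PYTHONPATH) of how the decimal, custom-serializer, and aliased fields behave once a model has assigned `value`:

```python
from orm import fields

# Decimal field: the DB value is coerced to float and rounded to decimal_places
price = fields.ThisModelDecimalValue(db_column='Price', decimal_places=4)
price.value = '12.34567'
print(price.get_value_serialized())   # 12.3457

# Custom serializer: the lambda receives the raw value (same shape ItemModel uses for taxes)
taxes = fields.ThisModelValueCustomSerializer(
    db_column='OrdLineTaxAmt',
    value_serialized=lambda v: [{"code": 1, "amount": round(float(v), 4)}]
)
taxes.value = '1.2345'
print(taxes.get_value_serialized())   # [{'code': 1, 'amount': 1.2345}]

# Aliased column: get_column() is what Model.get_fields() puts into the SELECT list
unit_price = fields.ThisModelValue(db_column='Price/Quantity', db_alias='unit_price')
print(unit_price.get_column())        # Price/Quantity AS unit_price
```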

26  orm/fields/one2one.py  Normal file
@@ -0,0 +1,26 @@
from .base import AttributeGetter


class One2OneValue(AttributeGetter):
    mandatory = ['db_column', 'db_table', 'join']

    def __init__(self, db_column=None, db_table=None, join=None, raise_not_found_error=True, *args, **kwargs):
        self.db_column = db_column
        self.db_table = db_table
        self.join = join
        self.raise_not_found_error = raise_not_found_error
        super().__init__(*args, **kwargs)

    def get_where(self):
        filters = ' AND '.join([f'{f[1]} = ?' for f in self.join])
        return f'WHERE \n {filters}'

    def get_select(self):
        return f'SELECT \n {self.db_column}'

    def get_from(self):
        # noinspection SpellCheckingInspection
        return f'FROM {self.db_table} (NOLOCK)'

    def get_query(self):
        return f'{self.get_select()} \n{self.get_from()} \n{self.get_where()}'

236  orm/model.py  Normal file
@@ -0,0 +1,236 @@
import json
import pyodbc
import requests
from .fields import ThisModelValue, One2OneValue
from .fields.base import AttributeGetter
from threading import Thread


class Model(Thread):
    class Conf:
        db_table = ''
        filters = []
        log_table = ''

    def __init__(self, filters: tuple, conn: str = None, raise_not_found_error: bool = True):
        super().__init__()
        self.conf = self.Conf()
        self.filters = filters
        self.raw_result = {}
        self.many = []
        self.conn_str = conn
        self.raise_not_found_error = raise_not_found_error
        self.cursor = None
        self.response = None
        self.endpoint_url = None
        self.token = None

    def run(self) -> None:
        with pyodbc.connect(self.conn_str) as self.conn:
            with self.conn.cursor() as cur:
                self.cursor = cur
                # Mark as working
                self.set_status(1)

                # Query and Sync
                self.populate_data()
                self.perform_request()
                self.conn.commit()

    def get_filters(self):
        return ' AND '.join([f'{f} = ?' for f in self.conf.filters])

    def get_where(self):
        return f'WHERE \n {self.get_filters()}'

    def get_fields(self):
        # noinspection GrazieInspection
        """
        There are 3 types of fields in a Model
        ###########################################################################################
        This will append all the fields of the model which are instances of <ThisModelValue>,
        as an example for this case:
        branch = fields.ThisModelValue(db_column='Location_Code')
        the value for keyword db_column will be appended to fields ['Location_Code']
        if the field has an alias, as an example for:
        unit_price = fields.ThisModelValue(db_column='Price/Quantity', db_alias='unit_price')
        the following value will be appended to fields:
        ['Price/Quantity AS unit_price']
        ###########################################################################################
        If the field is an instance of One2OneValue,
        an example for this case would be:
        official_doc = fields.One2OneValue(
            db_column='ReceiptNumber',
            db_table='GovernmentReceipt.dbo.ReceiptHistory',
            join=(
                ('Order_Number', 'OrderNumber'),
                ('Order_Date', 'OrderDate'),
                ('Location_Code', 'LocationCode')
            )
        )
        the first position of each tuple in join will be appended to the fields because they will be
        required in order to perform the one 2 one search, so the values appended to fields will be:
        ['Order_Number', 'Order_Date', 'Location_Code']
        ###########################################################################################
        If the field is another Model used for one 2 many relations the filters will be appended,
        an example for this case would be:
        items = ItemModel(
            ['Location_Code', 'Order_Date', 'Order_Number']
        )
        so in this case items.filters will be appended to fields:
        ['Location_Code', 'Order_Date', 'Order_Number']
        """
        fields = []
        for k, field in self.__dict__.items():
            if isinstance(field, ThisModelValue):
                fields.append(field.get_column())
            elif isinstance(field, One2OneValue):
                for join in field.join:
                    fields.append(join[0])
            elif issubclass(field.__class__, self.__class__.__base__):
                for f in field.filters:
                    fields.append(f)
        return set(fields)

    def get_select(self):
        cols = ', '.join([f for f in self.get_fields()])
        return f'SELECT \n {cols}'

    def get_from(self):
        # noinspection SpellCheckingInspection,PyUnresolvedReferences
        return f'FROM {self.conf.db_table} (NOLOCK)'

    def get_query(self):
        return f'{self.get_select()} \n{self.get_from()} \n{self.get_where()}'

    def assign_values(self):
        for k, field in self.__dict__.items():
            if isinstance(field, ThisModelValue):
                field.value = self.raw_result[field.db_alias]
                field.raw_result = self.raw_result

    def cursor_execute(self, query, *bind_variables, fetch=None, raise_not_found_error=True):
        try:
            self.cursor.execute(query, *bind_variables)
            result = getattr(self.cursor, fetch)()
            if not result and raise_not_found_error:
                raise Exception(
                    f"NOT FOUND \nusing query => {query}"
                    f" with args => {bind_variables}"
                )
            return result
        except pyodbc.ProgrammingError as e:
            print(f"error => {e} \nwhen executing query => {query} \n with args => {bind_variables}")
            raise

    def populate_me(self):
        print(f'attempt to populate me {self}')
        row = self.cursor_execute(
            self.get_query(),
            *self.filters,
            fetch='fetchone',
            raise_not_found_error=self.raise_not_found_error
        )
        if not row:
            return 0
        columns = [i[0] for i in self.cursor.description]
        self.raw_result = dict(zip(columns, row))
        return 1

    def populate_one2one(self):
        for k, field in self.__dict__.items():
            if isinstance(field, One2OneValue):
                result = self.cursor_execute(
                    field.get_query(),
                    *(self.raw_result[f[0]] for f in field.join),
                    fetch='fetchone',
                    raise_not_found_error=field.raise_not_found_error
                )
                if result:
                    field.value = result[0]
                    continue
                field.value = None

    # noinspection PyUnresolvedReferences
    def populate_one2many(self):
        for k, field in self.__dict__.items():
            if issubclass(field.__class__, self.__class__.__base__):
                # materialize the filter values once; a generator would be exhausted
                # by the query below and could not be reused for the child objects
                new_filters = tuple(self.raw_result[f] for f in field.filters)
                rows = self.cursor_execute(
                    field.get_query(),
                    *new_filters,
                    fetch='fetchall',
                    raise_not_found_error=field.raise_not_found_error
                )
                columns = [i[0] for i in self.cursor.description]
                result = [dict(zip(columns, row)) for row in rows]
                for r in result:
                    obj = field.__class__(new_filters)
                    obj.raw_result = r
                    obj.assign_values()
                    obj.cursor = self.cursor
                    obj.populate_one2one()
                    obj.populate_one2many()
                    field.many.append(obj)

    def populate_data(self):
        if not self.populate_me():
            return
        self.assign_values()
        self.populate_one2one()
        self.populate_one2many()

    def get_value_serialized(self):
        if self.many:
            return [obj.get_value_serialized() for obj in self.many]
        if not self.raw_result:
            return None
        data = {}
        for k, v in self.__dict__.items():
            try:
                if isinstance(v, AttributeGetter):
                    data[k] = v.get_value_serialized()

                """
                If the attribute is a one to many relationship and there is no result, return an
                empty list
                """
                if issubclass(v.__class__, self.__class__.__base__):
                    value_serialized = v.get_value_serialized()
                    if value_serialized is None:
                        data[k] = []
                    else:
                        data[k] = value_serialized
            except Exception as e:
                print(f"error when serializing {k} {v}")
                raise e
        return data

    def perform_request(self):
        self.set_status(2)
        try:
            self.response = requests.post(
                self.endpoint_url,
                headers={
                    "Authorization": self.token,
                    "Content-Type": "application/json"
                },
                data=json.dumps(self.get_value_serialized())
            )
            if self.response.ok:
                self.set_status(4)
                return
            self.set_status(3, error=json.dumps(self.response.json()))
        except Exception as e:
            self.set_status(3, error=str(e))

    def set_status(self, status, error=None):
        args = (status, error) + tuple(self.filters)
        # noinspection SqlDialectInspection,SqlNoDataSourceInspection,SqlResolve
        self.cursor.execute(
            f'''UPDATE {self.conf.log_table} WITH (ROWLOCK)
            SET status=?, error=?
            WHERE {self.get_filters()}''',
            *args
        )
        self.conn.commit()
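
The docstring in `get_fields` describes how the three field kinds end up in the generated SELECT; as a sanity check, a small sketch (not part of the commit, no database needed because `run()` is never called) that prints the query a `SaleModel` would issue:

```python
from models.docr.sale import SaleModel

# Filters follow SaleModel.Conf.filters order: Location_Code, Order_Date, Order_Number (example values)
sale = SaleModel(('14759', '20230701', '54678'))
print(sale.get_query())
# Roughly (column order varies because get_fields() returns a set):
# SELECT
#  OrderUUID, Location_Code, Order_Number, Added, Order_Date, ...
# FROM pos.dbo.Orders (NOLOCK)
# WHERE
#  Location_Code = ? AND Order_Date = ? AND Order_Number = ?
```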

12  sql/docr/new_sales.sql  Normal file
@@ -0,0 +1,12 @@
insert into pos.dbo.hive_sales
select
    o.Location_Code, o.Order_Date, o.Order_Number, 0, null
from pos.dbo.Orders o
    left join pos.dbo.hive_sales hs
        on hs.Location_Code = o.Location_Code
        and hs.Order_Date = o.Order_Date
        and hs.Order_Number = o.Order_Number
where
    o.Order_Date = ?
    and o.Order_Status_Code = 4
    and hs.Order_Number is null

13  sql/docr/new_sales_specific.sql  Normal file
@@ -0,0 +1,13 @@
insert into pos.dbo.hive_sales
select
    o.Location_Code, o.Order_Date, o.Order_Number, 0, null
from pos.dbo.Orders o
    left join pos.dbo.hive_sales hs
        on hs.Location_Code = o.Location_Code
        and hs.Order_Date = o.Order_Date
        and hs.Order_Number = o.Order_Number
where
    o.Order_Date = ?
    and o.Order_Status_Code = 4
    and hs.Order_Number is null
    and o.Order_Number = ?

4  sql/docr/queue_table.sql  Normal file
@@ -0,0 +1,4 @@
select
    Location_Code, Order_Date, Order_Number
from pos.dbo.hive_sales
where status = 0

4  sql/docr/reprocess_sales.sql  Normal file
@@ -0,0 +1,4 @@
update pos.dbo.hive_sales
set status = 0
where
    Order_Date = ?

5  sql/docr/reprocess_sales_specific.sql  Normal file
@@ -0,0 +1,5 @@
update pos.dbo.hive_sales
set status = 0
where
    Order_Date = ?
    and Order_Number = ?

1  sql/docr/summary_done.sql  Normal file
@@ -0,0 +1 @@
select * from pos.dbo.Daily_Summary where System_Date = ?

0  utils/__init__.py  Normal file

37  utils/timezone.py  Normal file
@@ -0,0 +1,37 @@
try:
    # noinspection PyUnresolvedReferences
    import pytz

    def tz(tz_str):
        return pytz.timezone(tz_str)
except ImportError:
    import zoneinfo

    def tz(tz_str):
        return zoneinfo.ZoneInfo(tz_str)

from datetime import datetime, timezone


class Current:
    value = timezone.utc


_current = Current()


def set_current(tz_str):
    _current.value = tz(tz_str)


def get_current():
    return _current.value


def now():
    return make_aware(datetime.now())


def make_aware(value):
    # pytz timezones expose localize(); zoneinfo and datetime.timezone objects do not,
    # so fall back to attaching the tzinfo directly in that case
    if hasattr(_current.value, 'localize'):
        return _current.value.localize(value)
    return value.replace(tzinfo=_current.value)
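
`utils.timezone` keeps a single process-wide zone that docr.py sets from `conf['timezone']`; a small usage sketch, not part of the commit (the zone name below is only an example):

```python
from datetime import datetime
from utils import timezone

timezone.set_current('America/Santo_Domingo')   # assumption: example IANA zone name
print(timezone.now())                           # timezone-aware current time
print(timezone.make_aware(datetime(2023, 7, 1)).isoformat())  # what ThisModelDateTimeValue serializes
```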