First
This commit is contained in:
commit
7be5ebcdaa
166
.gitignore
vendored
Normal file
166
.gitignore
vendored
Normal file
@ -0,0 +1,166 @@
|
|||||||
|
# ---> Python
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
share/python-wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.nox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*.cover
|
||||||
|
*.py,cover
|
||||||
|
.hypothesis/
|
||||||
|
.pytest_cache/
|
||||||
|
cover/
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
local_settings.py
|
||||||
|
db.sqlite3
|
||||||
|
db.sqlite3-journal
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
.pybuilder/
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
|
# IPython
|
||||||
|
profile_default/
|
||||||
|
ipython_config.py
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
# For a library or package, you might want to ignore these files since the code is
|
||||||
|
# intended to run in multiple environments; otherwise, check them in:
|
||||||
|
# .python-version
|
||||||
|
|
||||||
|
# pipenv
|
||||||
|
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||||
|
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||||
|
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||||
|
# install all needed dependencies.
|
||||||
|
#Pipfile.lock
|
||||||
|
|
||||||
|
# poetry
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||||
|
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||||
|
# commonly ignored for libraries.
|
||||||
|
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||||
|
#poetry.lock
|
||||||
|
|
||||||
|
# pdm
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||||
|
#pdm.lock
|
||||||
|
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||||
|
# in version control.
|
||||||
|
# https://pdm.fming.dev/#use-with-ide
|
||||||
|
.pdm.toml
|
||||||
|
|
||||||
|
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||||
|
__pypackages__/
|
||||||
|
|
||||||
|
# Celery stuff
|
||||||
|
celerybeat-schedule
|
||||||
|
celerybeat.pid
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
|
# Environments
|
||||||
|
.env
|
||||||
|
.venv
|
||||||
|
env/
|
||||||
|
venv/
|
||||||
|
ENV/
|
||||||
|
env.bak/
|
||||||
|
venv.bak/
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
/site
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
|
dmypy.json
|
||||||
|
|
||||||
|
# Pyre type checker
|
||||||
|
.pyre/
|
||||||
|
|
||||||
|
# pytype static type analyzer
|
||||||
|
.pytype/
|
||||||
|
|
||||||
|
# Cython debug symbols
|
||||||
|
cython_debug/
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||||
|
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||||
|
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||||
|
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||||
|
#.idea/
|
||||||
|
|
||||||
|
# Application-specific
|
||||||
|
*.ini
|
||||||
|
*.db*
|
||||||
|
|
9
LICENSE
Normal file
9
LICENSE
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2023 jyio
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
3
README.md
Normal file
3
README.md
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
# vistassh-py
|
||||||
|
|
||||||
|
Python-based web interface for VistA roll-and-scroll terminal
|
193
XWBSSOi.py
Normal file
193
XWBSSOi.py
Normal file
@ -0,0 +1,193 @@
|
|||||||
|
#!/usr/bin/env python3

import ctypes
import ctypes.wintypes
import winreg
import socket
import contextlib
from typing import Any, Optional, Generator

# Defaults for the IAM/STS SOAP exchange (see get_sso_token below).
DEFAULT_USER_AGENT = 'Borland SOAP 1.2'
DEFAULT_ISSUER = 'https://ssoi.sts.va.gov/Issuer/smtoken/SAML2'

# Opaque Win32 handle/pointer aliases used by the crypt32 bindings below.
HCERTSTORE = ctypes.c_void_p
PCERT_INFO = ctypes.c_void_p
HCRYPTPROV_LEGACY = ctypes.c_void_p

# Constants from wincrypt.h / cryptuiapi.h.
CERT_STORE_PROV_MEMORY = b'Memory'
X509_ASN_ENCODING = 0x00000001
PKCS_7_ASN_ENCODING = 0x00010000
CERT_COMPARE_ANY = 0
CERT_COMPARE_SHIFT = 16
CERT_FIND_ANY = CERT_COMPARE_ANY<<CERT_COMPARE_SHIFT
CERT_NAME_FRIENDLY_DISPLAY_TYPE = 5
CERT_DIGITAL_SIGNATURE_KEY_USAGE = 0x80
CERT_STORE_ADD_ALWAYS = 4
# CERT_HASH_PROP_ID is the documented alias of the SHA-1 hash property id.
CERT_HASH_PROP_ID = CERT_SHA1_HASH_PROP_ID = 3
|
||||||
|
|
||||||
|
class CERT_CONTEXT(ctypes.Structure):
    # ctypes mirror of the Win32 CERT_CONTEXT structure (wincrypt.h).
    _fields_ = [
        ('dwCertEncodingType', ctypes.wintypes.DWORD),
        ('pbCertEncoded', ctypes.POINTER(ctypes.wintypes.BYTE)),
        ('cbCertEncoded', ctypes.wintypes.DWORD),
        ('pCertInfo', PCERT_INFO),
        ('hCertStore', HCERTSTORE),
    ]
# Pointer-to-CERT_CONTEXT, the handle type returned by most cert APIs below.
PCCERT_CONTEXT = ctypes.POINTER(CERT_CONTEXT)
|
||||||
|
|
||||||
|
crypt32 = ctypes.WinDLL('crypt32')

# Open an arbitrary certificate store (memory store, file store, ...).
CertOpenStore = crypt32.CertOpenStore
CertOpenStore.restype = HCERTSTORE
CertOpenStore.argtypes = (ctypes.wintypes.LPCSTR, ctypes.wintypes.DWORD, HCRYPTPROV_LEGACY, ctypes.wintypes.DWORD, ctypes.c_void_p)

# Open a well-known system store such as 'MY' (the user's personal store).
CertOpenSystemStoreW = crypt32.CertOpenSystemStoreW
CertOpenSystemStoreW.restype = HCERTSTORE
CertOpenSystemStoreW.argtypes = (HCRYPTPROV_LEGACY, ctypes.wintypes.LPCWSTR)

CertCloseStore = crypt32.CertCloseStore
CertCloseStore.restype = ctypes.wintypes.BOOL
CertCloseStore.argtypes = (HCERTSTORE, ctypes.wintypes.DWORD)

# Enumerate store contents; pass the previous context (or NULL) to advance.
CertEnumCertificatesInStore = crypt32.CertEnumCertificatesInStore
CertEnumCertificatesInStore.restype = PCCERT_CONTEXT
CertEnumCertificatesInStore.argtypes = (HCERTSTORE, PCCERT_CONTEXT)

CertFindCertificateInStore = crypt32.CertFindCertificateInStore
CertFindCertificateInStore.restype = PCCERT_CONTEXT
CertFindCertificateInStore.argtypes = (HCERTSTORE, ctypes.wintypes.DWORD, ctypes.wintypes.DWORD, ctypes.wintypes.DWORD, ctypes.c_void_p, PCCERT_CONTEXT)

CertAddCertificateContextToStore = crypt32.CertAddCertificateContextToStore
CertAddCertificateContextToStore.restype = ctypes.wintypes.BOOL
CertAddCertificateContextToStore.argtypes = (HCERTSTORE, PCCERT_CONTEXT, ctypes.wintypes.DWORD, ctypes.POINTER(PCCERT_CONTEXT))

# Two-call API: NULL buffer returns required length (in WCHARs, incl. NUL).
CertGetNameStringW = crypt32.CertGetNameStringW
CertGetNameStringW.restype = ctypes.wintypes.DWORD
CertGetNameStringW.argtypes = (PCCERT_CONTEXT, ctypes.wintypes.DWORD, ctypes.wintypes.DWORD, ctypes.c_void_p, ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD)

CertGetIntendedKeyUsage = crypt32.CertGetIntendedKeyUsage
CertGetIntendedKeyUsage.restype = ctypes.wintypes.BOOL
CertGetIntendedKeyUsage.argtypes = (ctypes.wintypes.DWORD, PCERT_INFO, ctypes.POINTER(ctypes.wintypes.BYTE), ctypes.wintypes.DWORD)

# Two-call API: NULL buffer queries size via the DWORD out-parameter.
CertGetCertificateContextProperty = crypt32.CertGetCertificateContextProperty
CertGetCertificateContextProperty.restype = ctypes.wintypes.BOOL
CertGetCertificateContextProperty.argtypes = (PCCERT_CONTEXT, ctypes.wintypes.DWORD, ctypes.c_void_p, ctypes.POINTER(ctypes.wintypes.DWORD))

# Returns 0 when the certificate is time-valid for the given FILETIME (NULL = now).
CertVerifyTimeValidity = crypt32.CertVerifyTimeValidity
CertVerifyTimeValidity.restype = ctypes.wintypes.LONG
CertVerifyTimeValidity.argtypes = (ctypes.wintypes.LPFILETIME, PCERT_INFO)

cryptui = ctypes.WinDLL('cryptui')

# Modal certificate-picker dialog over a store's contents.
CryptUIDlgSelectCertificateFromStore = cryptui.CryptUIDlgSelectCertificateFromStore
CryptUIDlgSelectCertificateFromStore.restype = PCCERT_CONTEXT
CryptUIDlgSelectCertificateFromStore.argtypes = (HCERTSTORE, ctypes.wintypes.HWND, ctypes.wintypes.LPCWSTR, ctypes.wintypes.LPCWSTR, ctypes.wintypes.DWORD, ctypes.wintypes.DWORD, ctypes.c_void_p)

# Used as the owner window for the dialog when no HWND is supplied.
GetConsoleWindow = ctypes.windll.kernel32.GetConsoleWindow
GetConsoleWindow.restype = ctypes.wintypes.HWND
|
||||||
|
|
||||||
|
@contextlib.contextmanager
def ManagedCertOpenStore(lpszStoreProvider: ctypes.wintypes.LPCSTR, dwEncodingType: ctypes.wintypes.DWORD, hCryptProv: HCRYPTPROV_LEGACY, dwFlags: ctypes.wintypes.DWORD, pvPara: ctypes.c_void_p) -> Generator[HCERTSTORE, None, None]:
    """Context-managed CertOpenStore: yield the store handle, always close it."""
    handle = CertOpenStore(lpszStoreProvider, dwEncodingType, hCryptProv, dwFlags, pvPara)
    try:
        yield handle
    finally:
        # Close even when the with-body raises.
        CertCloseStore(handle, 0)
|
||||||
|
|
||||||
|
@contextlib.contextmanager
def ManagedCertOpenSystemStore(hProv: HCRYPTPROV_LEGACY, szSubsystemProtocol: ctypes.wintypes.LPCWSTR) -> Generator[HCERTSTORE, None, None]:
    """Context-managed CertOpenSystemStoreW: yield the store handle, always close it."""
    handle = CertOpenSystemStoreW(hProv, szSubsystemProtocol)
    try:
        yield handle
    finally:
        # Close even when the with-body raises.
        CertCloseStore(handle, 0)
|
||||||
|
|
||||||
|
def get_vista_certificate(show_cert_dialog: bool=True, hwnd: Optional[int]=0) -> PCCERT_CONTEXT:
    """Select a client certificate from the user's personal ('MY') store.

    Candidates are copied into a temporary in-memory store when they carry the
    digital-signature key usage, are currently time-valid, and their friendly
    name contains none of 'Card Authentication', '0,' or 'Signature'.  With
    show_cert_dialog the user picks via the CryptUI dialog (owned by *hwnd*,
    or the console window when hwnd is None); otherwise the last candidate
    found is returned (may be None when nothing qualified).
    """
    with ManagedCertOpenSystemStore(0, 'MY') as store_system, ManagedCertOpenStore(CERT_STORE_PROV_MEMORY, 0, None, 0, None) as store_memory:
        cert_selection = cert_iter = None
        # Walk every certificate in the system store.
        while cert_iter := CertEnumCertificatesInStore(store_system, cert_iter):
            # NOTE(review): CERT_FIND_ANY with a NULL previous-context returns the
            # store's first matching context, not cert_iter — presumably only used
            # as a fresh context handle to add to the memory store; confirm intent.
            if cert_valid := CertFindCertificateInStore(store_system, X509_ASN_ENCODING|PKCS_7_ASN_ENCODING, 0, CERT_FIND_ANY, None, None):
                # Two-step CertGetNameStringW: size query, then fill.
                name_bufsz = CertGetNameStringW(cert_iter, CERT_NAME_FRIENDLY_DISPLAY_TYPE, 0, None, None, 0)
                buf = ctypes.create_unicode_buffer(name_bufsz)
                CertGetNameStringW(cert_iter, CERT_NAME_FRIENDLY_DISPLAY_TYPE, 0, None, buf, name_bufsz)
                name_string = buf.value
                # Intended key usage byte; CertVerifyTimeValidity returns 0 when valid now.
                key_usage_bits = ctypes.wintypes.BYTE()
                CertGetIntendedKeyUsage(X509_ASN_ENCODING|PKCS_7_ASN_ENCODING, cert_iter.contents.pCertInfo, ctypes.byref(key_usage_bits), ctypes.sizeof(key_usage_bits))
                valid_date = CertVerifyTimeValidity(None, cert_iter.contents.pCertInfo)
                if ((key_usage_bits.value&CERT_DIGITAL_SIGNATURE_KEY_USAGE) == CERT_DIGITAL_SIGNATURE_KEY_USAGE) and (valid_date == 0) and ('Card Authentication' not in name_string) and ('0,' not in name_string) and ('Signature' not in name_string):
                    CertAddCertificateContextToStore(store_memory, cert_iter, CERT_STORE_ADD_ALWAYS, ctypes.byref(cert_valid))
                    cert_selection = cert_valid
        return CryptUIDlgSelectCertificateFromStore(store_memory, hwnd if hwnd is not None else GetConsoleWindow(), 'VistA Logon - Certificate Selection', 'Select a certificate for VistA authentication', 0, 0, None) if show_cert_dialog else cert_selection
|
||||||
|
|
||||||
|
def get_certificate_thumbprint(certificate: PCCERT_CONTEXT) -> bytes:
    """Return the certificate's SHA-1 thumbprint as raw bytes.

    Two-step property query: the first call (NULL buffer) reports the
    required size, the second fills the buffer.  Callers hex-encode the
    result with bytes.hex().
    """
    bufsz = ctypes.wintypes.DWORD()
    CertGetCertificateContextProperty(certificate, CERT_HASH_PROP_ID, None, ctypes.byref(bufsz))
    buffer = ctypes.create_string_buffer(bufsz.value)
    CertGetCertificateContextProperty(certificate, CERT_HASH_PROP_ID, buffer, ctypes.byref(bufsz))
    # Fix: ctypes .value truncates at the first NUL byte, silently corrupting any
    # hash containing 0x00; .raw sliced to the reported length keeps all bytes.
    return buffer.raw[:bufsz.value]
|
||||||
|
|
||||||
|
def get_certificate_friendly_display_name(certificate: PCCERT_CONTEXT) -> str:
    """Return the certificate's friendly display name via CertGetNameStringW."""
    # First call with a NULL buffer reports the required length (WCHARs, incl. NUL).
    required = CertGetNameStringW(certificate, CERT_NAME_FRIENDLY_DISPLAY_TYPE, 0, None, None, 0)
    name_buf = ctypes.create_unicode_buffer(required)
    # Second call fills the buffer with the name itself.
    CertGetNameStringW(certificate, CERT_NAME_FRIENDLY_DISPLAY_TYPE, 0, None, name_buf, required)
    return name_buf.value
|
||||||
|
|
||||||
|
# Registry-configured IAM/STS settings, each with a hard-coded production
# default used when the registry key is absent.  Converted from lambda
# assignments to plain defs (PEP 8 E731); call signatures are unchanged.
def get_registry_iam() -> Any:
    """IAM STS token endpoint URL."""
    return get_registry_value(winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Vista\\Common\\IAM', default='https://services.eauth.va.gov:9301/STS/RequestSecurityToken')

def get_registry_iam_ad() -> Any:
    """IAM STS (Active Directory) token endpoint URL."""
    return get_registry_value(winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Vista\\Common\\IAM_AD', default='https://services.eauth.va.gov:9201/STS/RequestSecurityToken')

def get_registry_rioserver() -> Any:
    """RIOSERVER registry setting.

    NOTE(review): default 'SecurityTokenService' does not look like a server
    name — possibly a copy-paste; confirm against the Delphi broker defaults.
    """
    return get_registry_value(winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Vista\\Common\\RIOSERVER', default='SecurityTokenService')

def get_registry_rioport() -> Any:
    """RIOPORT registry setting.

    NOTE(review): default 'RequestSecurityToken' is not a port — confirm the
    intended default.
    """
    return get_registry_value(winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Vista\\Common\\RIOPORT', default='RequestSecurityToken')

def get_registry_value(hkey: int, subkey: str, value: Optional[str]=None, default: Any=None) -> Any:
    """Read a single value from the Windows registry.

    value=None reads the key's default (unnamed) value.  Returns *default*
    when the key or value does not exist (FileNotFoundError).
    """
    try:
        with winreg.OpenKey(hkey, subkey) as key:
            return winreg.QueryValueEx(key, value)[0]
    except FileNotFoundError:
        return default
|
||||||
|
|
||||||
|
# issuer: https://ssoi.sts.va.gov/Issuer/smtoken/SAML2
# application: https://{computer_name}/Delphi_RPC_Broker/{app_name}
# app_name: CPRSChart.exe
def get_iam_request(application: str, issuer: str) -> str:
    """Build the WS-Trust RequestSecurityToken (Validate) SOAP envelope.

    *application* becomes the wsp:AppliesTo endpoint address and *issuer* the
    ns:Issuer address; the request type asks the STS to validate the TLS
    client certificate presented on the transport.
    """
    # NOTE(review): intra-envelope whitespace reproduced from an extraction that
    # stripped indentation — cosmetic only for SOAP, but confirm against upstream.
    return f'''<?xml version="1.0" encoding="UTF-8"?>
<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:ns="http://docs.oasis-open.org/ws-sx/ws-trust/200512">
<soapenv:Header/>
<soapenv:Body>
<ns:RequestSecurityToken>
<ns:Base>
<wss:TLS xmlns:wss="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd"/>
</ns:Base>
<wsp:AppliesTo xmlns:wsp="http://schemas.xmlsoap.org/ws/2004/09/policy">
<wsa:EndpointReference xmlns:wsa="http://schemas.xmlsoap.org/ws/2004/08/addressing">
<wsa:Address>{application}</wsa:Address>
</wsa:EndpointReference>
</wsp:AppliesTo>
<ns:Issuer>
<wsa:Address xmlns:wsa="http://schemas.xmlsoap.org/ws/2004/08/addressing">{issuer}</wsa:Address>
</ns:Issuer>
<ns:RequestType>http://schemas.xmlsoap.org/ws/2005/02/trust/Validate</ns:RequestType>
</ns:RequestSecurityToken>
</soapenv:Body>
</soapenv:Envelope>'''
|
||||||
|
|
||||||
|
def get_local_hostname() -> str:
    """Return this machine's fully-qualified domain name (PEP 8: def over lambda assignment)."""
    return socket.getfqdn()
|
||||||
|
|
||||||
|
def get_sso_token(application: Optional[str]=None, hostname: Optional[str]=None, issuer: Optional[str]=None, iam: Optional[str]=None, ua: Optional[str]=None, certificate: Optional[str]=None) -> str:
    """Obtain an SSOi token by POSTing a WS-Trust request to the IAM STS via curl.

    *certificate* is a hex thumbprint; when None the user picks a certificate
    through the selection dialog and its thumbprint is used.  curl's stderr is
    echoed to our stderr; its stdout (the SOAP response body) is returned.
    """
    import sys, subprocess
    if certificate is None:
        certificate = get_certificate_thumbprint(get_vista_certificate()).hex()
    # '--cert CurrentUser\MY\<thumbprint>' selects the client certificate from the
    # Windows certificate store; '--ca-native' trusts the OS CA store.
    res = subprocess.run(['curl', '-fsSL', '-X', 'POST', iam or get_registry_iam(), '--ca-native', '--cert', 'CurrentUser\\MY\\' + certificate, '-A', ua or DEFAULT_USER_AGENT, '-H', 'Content-Type: application/xml', '-H', 'Accept: application/xml', '-d', get_iam_request(f"https://{hostname or get_local_hostname()}/Delphi_RPC_Broker/{application or 'CPRSChart.exe'}", issuer or DEFAULT_ISSUER)], capture_output=True)
    print(res.stderr.decode('utf8'), end='', file=sys.stderr)
    return res.stdout.decode('utf8')
|
||||||
|
|
||||||
|
async def get_sso_token_async(application: Optional[str]=None, hostname: Optional[str]=None, issuer: Optional[str]=None, iam: Optional[str]=None, ua: Optional[str]=None, certificate: Optional[str]=None) -> str:
    """Async variant of get_sso_token: same curl invocation via asyncio subprocess.

    NOTE(review): the certificate dialog call below is synchronous and will
    block the event loop while the dialog is open.
    """
    import sys, asyncio
    if certificate is None:
        certificate = get_certificate_thumbprint(get_vista_certificate()).hex()
    # res is the (stdout, stderr) pair from communicate().
    res = await (await asyncio.create_subprocess_exec('curl', '-fsSL', '-X', 'POST', iam or get_registry_iam(), '--ca-native', '--cert', 'CurrentUser\\MY\\' + certificate, '-A', ua or DEFAULT_USER_AGENT, '-H', 'Content-Type: application/xml', '-H', 'Accept: application/xml', '-d', get_iam_request(f"https://{hostname or get_local_hostname()}/Delphi_RPC_Broker/{application or 'CPRSChart.exe'}", issuer or DEFAULT_ISSUER), stdin=None, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)).communicate()
    print(res[1].decode('utf8'), end='', file=sys.stderr)
    return res[0].decode('utf8')
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # CLI entry point: print a freshly acquired SSO token; exit 1 on OS-level
    # failure (e.g. curl not found).
    try:
        print(get_sso_token())
    except OSError:
        exit(1)
|
285
autoproc.py
Normal file
285
autoproc.py
Normal file
@ -0,0 +1,285 @@
|
|||||||
|
#!/usr/bin/env python3

import sys
import getpass
import re
import codecs
import asyncio
import contextlib
import logging
from collections import namedtuple

from typing import Optional, Union, Sequence, NamedTuple, Callable

# Module-level logger, configured by the embedding application.
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Base record for match results produced by ExpectQ.  NOTE(review): the
# namedtuple's typename is 'PatternMatch' although the binding is ExpectMatch
# (both ExactMatch and PatternMatch subclass it below) — confirm intentional.
ExpectMatch = namedtuple('PatternMatch', ('batch', 'index', 'pattern', 'match', 'groups', 'groupdict', 'before'))
# All fields default to None so partially-populated results are easy to build.
ExpectMatch.__new__.__defaults__ = (None,)*len(ExpectMatch._fields)
|
||||||
|
class ExpectQ(object):
    """Provide an expect-like interface over an asyncio queue"""

    def __init__(self, pipequeue: asyncio.Queue, timeout_settle: float=1):
        # pipequeue delivers decoded text chunks (see stdout_writer);
        # buffer accumulates unconsumed text between matches.
        self.pipequeue = pipequeue
        self.buffer = ''
        self.timeout_settle = timeout_settle

    def set_timeout(self, timeout_settle: float=1):
        """Set default timeout"""
        self.timeout_settle = timeout_settle

    def reset(self, buffer: str=''):
        """Clear or restore buffer"""
        self.buffer = buffer

    # Clearing is just resetting to the empty string.
    clear = reset

    async def prompts(self, endl: str='\r\n', timeout_settle: Optional[float]=None, throw: bool=False):
        """Yield (buffer, prompt_start) pairs as complete lines arrive.

        prompt_start indexes the text after the LAST endl, i.e. the pending
        prompt; the buffer is consumed on each yield.  On settle timeout,
        either raise (throw=True) or yield (None, None) and keep waiting.
        """
        len_endl = len(endl)
        while True:
            if (pos := self.buffer.rfind(endl)) >= 0:
                buffer = self.buffer
                self.buffer = ''
                yield buffer, pos + len_endl
            while True:
                try:
                    self.buffer += await asyncio.wait_for(self.pipequeue.get(), timeout=(timeout_settle or self.timeout_settle))
                    break
                except asyncio.TimeoutError: # no more data
                    if throw:
                        raise
                    yield None, None

    async def promptmatches(self, *mappings: Union[str, re.Pattern, tuple, list], endl: str='\r\n', timeout_settle: Optional[float]=None, throw: bool=False):
        """Drive a scripted prompt/response conversation.

        Each mapping is (pattern, response) — exact string or re.Pattern —
        or a list of such tuples (with an optional third 'end' flag) to be
        tried against each prompt.  Yields (match, response) pairs; a truthy
        value sent back into the generator interrupts the batch.  Responses
        may be callables invoked with the match record.
        """
        for i, mapping in enumerate(mappings):
            try:
                match mapping:
                    case (str() as pattern, response) if response is None or isinstance(response, str) or callable(response):
                        # Single exact-prompt mapping: wait until the pending prompt equals pattern.
                        async for buffer, pos in self.prompts(endl=endl, timeout_settle=timeout_settle, throw=True):
                            if pattern == buffer[pos:]:
                                yield (m := self.ExactMatch(batch=i, index=0, pattern=mapping, match=mapping, groups=None, groupdict=None, before=buffer[:pos])), (response(m) if callable(response) else response)
                                break
                            else:
                                # Not this prompt yet: put the text back and keep reading.
                                self.reset(buffer)
                    case (re.Pattern() as pattern, response) if response is None or isinstance(response, str) or callable(response):
                        # Single regex mapping.
                        async for buffer, pos in self.prompts(endl=endl, timeout_settle=timeout_settle, throw=True):
                            if match := pattern.search(buffer[pos:]):
                                yield (m := self.PatternMatch(batch=i, index=0, pattern=pattern, match=match, groups=match.groups(), groupdict=match.groupdict(), before=buffer[:pos])), (response(m) if callable(response) else response)
                                break
                            else:
                                self.reset(buffer)
                    case (*_,) as components:
                        # Batch of alternatives: index exact strings and regexes separately.
                        exact = {}
                        expr = {}
                        for j, component in enumerate(components):
                            match component:
                                case (str() as pattern, response, *rest) if response is None or isinstance(response, str) or callable(response):
                                    exact[pattern] = (j, response, None if len(rest) < 1 else rest[0])
                                case (re.Pattern() as pattern, response, *rest) if response is None or isinstance(response, str) or callable(response):
                                    expr[pattern] = (j, response, None if len(rest) < 1 else rest[0])
                        async for buffer, pos in self.prompts(endl=endl, timeout_settle=timeout_settle, throw=True):
                            if buffer is not None:
                                prompt = buffer[pos:]
                                if prompt in exact:
                                    j, response, end = exact[prompt]
                                    # The caller may send a truthy value to interrupt the batch.
                                    interrupt = yield (m := self.ExactMatch(batch=i, index=j, pattern=prompt, match=prompt, groups=None, groupdict=None, before=buffer[:pos])), (response(m) if callable(response) else response)
                                else:
                                    for pattern in expr:
                                        if match := pattern.search(prompt):
                                            j, response, end = expr[pattern]
                                            interrupt = yield (m := self.PatternMatch(batch=i, index=j, pattern=pattern, match=match, groups=match.groups(), groupdict=match.groupdict(), before=buffer[:pos])), (response(m) if callable(response) else response)
                                            break
                                    else:
                                        # Nothing matched: restore and keep reading prompts.
                                        self.reset(buffer)
                                        continue
                                if interrupt:
                                    # Extra yield pairs with the caller's send() before leaving.
                                    yield
                                    break
                                elif end:
                                    break
            except asyncio.TimeoutError as ex: # no more data
                if throw:
                    # Re-raise with the failing batch index and mapping attached.
                    raise asyncio.TimeoutError(*(ex.args + (i, mapping)))
                yield None, None

    async def earliest(self, *patterns: Union[str, re.Pattern], timeout_settle: Optional[float]=None, throw: bool=False) -> Optional[NamedTuple]:
        """Wait for any string or regular expression pattern match, specified in *patterns, and optionally raise exception upon timeout"""
        try:
            while True:
                for i, pattern in enumerate(patterns): # try every option
                    if isinstance(pattern, str):
                        if (pos := self.buffer.find(pattern)) >= 0: # found it
                            res = self.ExactMatch(index=i, pattern=pattern, match=pattern, groups=None, groupdict=None, before=self.buffer[:pos])
                            # Consume through the end of the match.
                            self.buffer = self.buffer[pos + len(pattern):]
                            return res
                    else:
                        if match := pattern.search(self.buffer): # found it
                            res = self.PatternMatch(index=i, pattern=pattern, match=match, groups=match.groups(), groupdict=match.groupdict(), before=self.buffer[:match.start()])
                            self.buffer = self.buffer[match.end():]
                            return res
                else: # fetch more data
                    self.buffer += await asyncio.wait_for(self.pipequeue.get(), timeout=(timeout_settle or self.timeout_settle))
        except asyncio.TimeoutError: # no more data
            if throw:
                raise
            return None

    async def startswith(self, *patterns: Union[str, re.Pattern], timeout_settle: Optional[float]=None, throw: bool=False) -> Optional[NamedTuple]:
        """Wait for any string or regular expression pattern match, specified in *patterns, at the start of the stream and optionally raise exception upon timeout"""
        try:
            while True:
                for i, pattern in enumerate(patterns): # try every option
                    if isinstance(pattern, str):
                        if self.buffer.startswith(pattern): # found it
                            res = self.ExactMatch(index=i, pattern=pattern, match=pattern, groups=None, groupdict=None, before='')
                            self.buffer = self.buffer[len(pattern):]
                            return res
                    else:
                        # re.match anchors at the start of the buffer.
                        if match := pattern.match(self.buffer): # found it
                            res = self.PatternMatch(index=i, pattern=pattern, match=match, groups=match.groups(), groupdict=match.groupdict(), before=self.buffer[:match.start()])
                            self.buffer = self.buffer[match.end():]
                            return res
                else: # fetch more data
                    self.buffer += await asyncio.wait_for(self.pipequeue.get(), timeout=(timeout_settle or self.timeout_settle))
        except asyncio.TimeoutError: # no more data
            if throw:
                raise
            return None

    async def endswith(self, *patterns: Union[str, re.Pattern], timeout_settle: Optional[float]=None, throw: bool=False) -> Optional[NamedTuple]:
        """Wait for any string or regular expression pattern match, specified in *patterns, at the end of the stream and optionally raise exception upon timeout"""
        try:
            while True:
                for i, pattern in enumerate(patterns): # try every option
                    if isinstance(pattern, str):
                        if self.buffer.endswith(pattern): # found it
                            res = self.ExactMatch(index=i, pattern=pattern, match=pattern, groups=None, groupdict=None, before=self.buffer[:-len(pattern)])
                            self.buffer = ''
                            return res
                    else:
                        # NOTE(review): search() matches anywhere, not only at the
                        # end — confirm whether an end-anchored match was intended.
                        if match := pattern.search(self.buffer): # found it
                            res = self.PatternMatch(index=i, pattern=pattern, match=match, groups=match.groups(), groupdict=match.groupdict(), before=self.buffer[:match.start()])
                            self.buffer = self.buffer[match.end():]
                            return res
                else: # fetch more data
                    self.buffer += await asyncio.wait_for(self.pipequeue.get(), timeout=(timeout_settle or self.timeout_settle))
        except asyncio.TimeoutError: # no more data
            if throw:
                raise
            return None

    # Calling the instance is shorthand for earliest().
    __call__ = earliest
    # Distinguishable result record types sharing the ExpectMatch fields.
    ExactMatch = type('ExactMatch', (ExpectMatch,), {})
    PatternMatch = type('PatternMatch', (ExpectMatch,), {})
|
||||||
|
|
||||||
|
class LockableCallable(object):
    """Couple a callable with an asyncio.Lock.

    The lock's locked/acquire/release are re-exported on the wrapper, the
    wrapper itself works as an async context manager over the lock, and
    withlock() invokes the callable while holding the lock.  Plain calls
    do not touch the lock.
    """

    def __init__(self, func: Callable, lock: asyncio.Lock=None):
        self.lock = lock = lock if lock is not None else asyncio.Lock()
        # Surface the lock primitives directly on the wrapper.
        self.locked = lock.locked
        self.acquire = lock.acquire
        self.release = lock.release
        # Keep the wrapped callable and mirror its identity for introspection.
        self.func = func
        self.__name__ = func.__name__
        self.__doc__ = func.__doc__

    def __call__(self, *args, **kw):
        """Invoke the wrapped callable without acquiring the lock."""
        return self.func(*args, **kw)

    async def __aenter__(self):
        await self.lock.acquire()

    async def __aexit__(self, exc_type, exc, tb):
        self.lock.release()

    async def withlock(self, *args, **kw):
        """Invoke the wrapped callable while holding the lock."""
        async with self.lock:
            return self.func(*args, **kw)
|
||||||
|
|
||||||
|
async def create_instrumented_subprocess_exec(*args: str, stdin_endl=b'\n', **kw) -> asyncio.subprocess.Process:
    """Create asyncio subprocess, coupled to host stdio, with ability to attach tasks that could inspect its stdout and inject into its stdin"""
    process = await asyncio.create_subprocess_exec(*args, **kw)
    tasks = set()   # helper tasks attached to this process
    queues = set()  # subscriber queues fed by stdout_writer
    def create_task(*args, **kw):
        # Track the task; the done-callback removes it from the set automatically.
        tasks.add(item := asyncio.create_task(*args, **kw))
        item.add_done_callback(tasks.remove)
        return item
    process.create_task = create_task
    def subscribe(pipequeue=None):
        # Register (or create) a queue that will receive decoded stdout text;
        # the queue gains an unsubscribe() hook for detaching.
        queues.add(pipequeue := pipequeue or asyncio.Queue())
        pipequeue.unsubscribe = lambda: queues.remove(pipequeue)
        return pipequeue
    process.subscribe = subscribe
    def sendline(data=None, endl=None):
        # Write one line (or just a line terminator) to the child's stdin.
        if data is not None:
            process.stdin.write(data.encode('utf-8') + (endl or stdin_endl))
        else:
            process.stdin.write(endl or stdin_endl)
    # Wrapped so callers can serialize stdin access through the attached lock.
    process.sendline = LockableCallable(sendline)
    create_task(stdout_writer(process.stdout, queues), name='@task:stdout-writer') # stdout
    process_wait = process.wait
    async def wait_wrapper(): # clean up tasks at the end
        await process_wait()
        proc_id = id(process)
        logger.debug('SHUTDOWN [proc#%d]: cleaning up'%proc_id)
        for item in set(tasks): # copy set to avoid RuntimeError: Set changed size during iteration
            if not item.done():
                item.cancel()
                try:
                    logger.debug('SHUTDOWN [proc#%d]: stopping [task#%d] %r'%(proc_id, id(item), item))
                    await item
                except asyncio.CancelledError:
                    # Expected: the task acknowledged cancellation.
                    pass
                logger.debug('SHUTDOWN [proc#%d]: stopped [task#%d]'%(proc_id, id(item)))
        logger.debug('SHUTDOWN [proc#%d]: done'%proc_id)
    # Replace wait() so awaiting process exit also tears down helper tasks.
    process.wait = wait_wrapper
    return process
|
||||||
|
|
||||||
|
async def stdout_writer(pipe: asyncio.StreamReader, subscribers: Sequence[asyncio.Queue], chunksize: int=4096, echo: bool=True):
    """Read data from pipe, decode into Unicode strings, and send to subscribers"""
    # NOTE(review): annotations corrected to match usage — pipe.read() is the
    # StreamReader API and each subscriber receives text via .put(); the
    # original annotated StreamWriter / Sequence[Task].
    try:
        # Incremental decoder keeps multi-byte sequences split across chunks intact.
        decoder = codecs.getincrementaldecoder('utf-8')(errors='replace')
        while True:
            try:
                chunk = await pipe.read(chunksize) # fetch a bunch of bytes
                if not chunk: # EOF
                    break
                text = decoder.decode(chunk)
            except asyncio.TimeoutError:
                continue
            except UnicodeDecodeError: # should not encounter errors with errors='replace'
                logger.exception('stdout_writer')
                break # bail on error
            else:
                if echo: # echo to stdout
                    sys.stdout.write(text)
                    sys.stdout.flush()
                for item in subscribers: # distribute to subscribers
                    await item.put(text)
    except KeyboardInterrupt:
        logger.info('KeyboardInterrupt: stdout_writer')
|
||||||
|
|
||||||
|
@contextlib.contextmanager
def subscribe(proc):
    """Attach a subscriber queue to *proc* for the duration of the with-block.

    The yielded queue also carries the process's ``sendline`` so a caller can
    both read output and send input through one handle; the subscription is
    released on exit.
    """
    channel = proc.subscribe()
    channel.sendline = proc.sendline
    try:
        yield channel
    finally:
        channel.unsubscribe()
|
||||||
|
|
||||||
|
@contextlib.asynccontextmanager
async def subscribe_async(proc):
    """Async variant of subscribe(): yield a subscriber queue bound to *proc*.

    The queue carries the process's ``sendline``; it is unsubscribed when the
    async with-block exits, even on error.
    """
    channel = proc.subscribe()
    channel.sendline = proc.sendline
    try:
        yield channel
    finally:
        channel.unsubscribe()
|
||||||
|
|
||||||
|
@contextlib.contextmanager
def expect(proc):
    """Yield an ExpectQ wrapping a fresh subscription on *proc*.

    The matcher gains the process's ``sendline``; the underlying queue is
    unsubscribed when the with-block exits.
    """
    channel = proc.subscribe()
    matcher = ExpectQ(channel)
    matcher.sendline = proc.sendline
    try:
        yield matcher
    finally:
        channel.unsubscribe()
|
||||||
|
|
||||||
|
@contextlib.asynccontextmanager
async def expect_async(proc):
    """Async variant of expect(): yield an ExpectQ wrapping a fresh subscription on *proc*.

    The matcher gains the process's ``sendline``; the underlying queue is
    unsubscribed when the async with-block exits.
    """
    channel = proc.subscribe()
    matcher = ExpectQ(channel)
    matcher.sendline = proc.sendline
    try:
        yield matcher
    finally:
        channel.unsubscribe()
|
30
ext_discovery.py
Normal file
30
ext_discovery.py
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import re
|
||||||
|
import autoproc
|
||||||
|
|
||||||
|
async def cmd_listclinics(proc):
    """Fetch list of clinics"""
    # Drives the VistA "Appointment List" menu, then abuses the '??' help
    # listing at the clinic prompt to enumerate every clinic, yielding one
    # {'uid', 'name', 'provider'} dict per clinic row.
    async with proc.sendline, autoproc.expect_async(proc) as expect:
        proc.sendline('^Appointment List')
        assert await expect.endswith('\r\nSelect division: ALL// ')
        proc.sendline() # default ALL
        assert await expect.endswith('\r\nCount, Non Count, or Both: C//')
        proc.sendline('Both')
        assert await expect.endswith('\r\nSelect clinic: ALL// ')
        proc.sendline('??') # ask VistA to enumerate the choices
        assert await expect.earliest('\r\n Choose from:')
        # Page through the listing: index 0 = pager "continue" prompt (more
        # pages follow), index 1 = back at the clinic prompt (listing done).
        while m_delimiter := await expect.endswith('\r\n Type <Enter> to continue or \'^\' to exit: ', '\r\nSelect clinic: ALL// '):
            for line in m_delimiter.before.splitlines():
                line = line.strip()
                if len(line) > 0:
                    # Row layout: "<uid>  <name>[  <provider>]" with 2+ spaces
                    # between columns; provider column is optional.
                    assert (m := re.match(r'^(\d+)\s{2,}(.*?)(?:\s{2,}(.*?))?$', line))
                    yield { 'uid': int(m.group(1)), 'name': m.group(2).upper(), 'provider': m.group(3).upper() if m.group(3) else None }
            if m_delimiter.index == 0:
                proc.sendline() # next page
            else:
                proc.sendline('^') # abandon the clinic prompt
                break
        # Return to the common parent menu so the session is reusable.
        proc.sendline('^Patient information AND OE/RR')
        assert await expect.endswith('\r\nSelect Patient Information and OE/RR Option: ', '\r\nSelect Patient Information and OE/RR <TEST ACCOUNT> Option: ')
        expect.clear()
|
83
ext_lab.py
Normal file
83
ext_lab.py
Normal file
@ -0,0 +1,83 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import re
|
||||||
|
import datetime
|
||||||
|
import util
|
||||||
|
import autoproc
|
||||||
|
|
||||||
|
import logging
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Local timezone, resolved once at import, used to make parsed timestamps tz-aware.
local_tzinfo = datetime.datetime.now(datetime.timezone.utc).astimezone().tzinfo


def _parse_vista_datetime(value, timed_format, time_marker):
    """Parse a VistA timestamp string into a tz-aware datetime.

    Uses *timed_format* when *time_marker* occurs in *value* (i.e. a time is
    present), otherwise falls back to the date-only format '%b %d, %Y'.
    Replaces five copy-pasted occurrences of the same conditional.
    """
    fmt = timed_format if time_marker in value else '%b %d, %Y'
    return datetime.datetime.strptime(value, fmt).replace(tzinfo=local_tzinfo)


def parse(text):
    """Parse one lab report into a dict of fields.

    Extracts practitioner/specimen/accession identifiers and the various
    report timestamps; fields whose pattern is absent are simply omitted.
    The whole right-stripped report text is kept under 'body'.
    """
    data = { 'body': text.rstrip() }
    if match := re.search(r'\bProvider: \b(?P<practitioner>.*?)\r\n', text):
        data.update(match.groupdict())
    if match := re.search(r'\bSpecimen: \b(?P<specimen>.*?)\r\n', text):
        data.update(match.groupdict())
    if match := re.search(r'\bAccession \[UID\]: \b(?P<accession>.*?) \[(?P<uid>.*?)\]\r\n', text):
        data.update(match.groupdict())
    if match := re.search(r'\bReport Released: \b(?P<time_released>.*?)\r\n', text):
        data['time_released'] = _parse_vista_datetime(match.group(1), '%b %d, %Y@%H:%M', '@')
    if match := re.search(r'\bSpecimen Collection Date: \b(?P<time_collected>.*?)\r\n', text):
        data['time_collected'] = _parse_vista_datetime(match.group(1), '%b %d, %Y@%H:%M', '@')
    if match := re.match(r'\s+----(.*?)----\s+page \d+\r\n', text):
        data['title'] = match.group(1)
    # Later header variants overwrite earlier accession/uid values on purpose.
    if match := re.search(r'\bAccession \[UID\]: (?P<accession>.*?) \[(?P<uid>.*?)\]\s+Received: (?P<time_received>.*?)\r\n', text):
        data.update(match.groupdict())
        data['time_received'] = _parse_vista_datetime(data['time_received'], '%b %d, %Y@%H:%M', '@')
    if match := re.search(r'\bReport Completed: \b(?P<time_completed>.*?)\r\n', text):
        data['time_completed'] = _parse_vista_datetime(match.group(1), '%b %d, %Y@%H:%M', '@')
    if match := re.search(r'\bCollection sample: (?P<specimen>.*?)\s+Collection date: (?P<time_collected>.*?)\r\n', text):
        data.update(match.groupdict())
        # Microbiology layout uses "Mon DD, YYYY HH:MM" — ':' marks the time.
        data['time_collected'] = _parse_vista_datetime(data['time_collected'], '%b %d, %Y %H:%M', ':')
    return data
|
||||||
|
|
||||||
|
async def cmd_reports(proc, mrn, alpha, omega):
    """Fetch lab reports"""
    # Runs the "Interim report" option of the Laboratory Menu for patient
    # *mrn* over the date range [alpha, omega], captures the paged output,
    # and yields one parsed report dict (see parse()) per report.
    async with proc.sendline, autoproc.expect_async(proc) as expect:
        proc.sendline('^Laboratory Menu')
        # Answer the report-setup prompts in order; nested tuples are
        # alternative/optional prompt groups understood by promptmatches.
        async for prompt, response in expect.promptmatches(
                ('Select HOSPITAL LOCATION NAME: ', None),
                ('Select Laboratory Menu Option: ', '13'), # Interim report
                ('Select Patient Name: ', mrn),
                (
                    ('Do you wish to continue with this patient [Yes/No]? ', 'Yes'),
                    ('Date to START with: TODAY//', util.vista_strftime(omega), True),
                ),
                ('Date to END with: T-7//', util.vista_strftime(alpha)),
                ('Print address page? NO// ', None), # default NO
                (
                    ('Do you want to start each note on a new page? NO// ', None), # default NO
                    ('DEVICE: HOME// ', 'HOME;90;1023', True),
                ),
                throw=True):
            proc.sendline(response)
        assert await expect.earliest(' HOME(CRT)\r\n')
        pages = []
        # Page through the output; indexes 0/1 are pager prompts whose
        # 'before' text carries a page, 2/3 mean the report is finished.
        async for prompt, response in expect.promptmatches((
            (re.compile(r' PRESS \'\^\' TO STOP $'), None),
            (re.compile(r' \'\^\' TO STOP$'), None),
            ('Select Patient Name: ', None),
            ('Select Laboratory Menu Option: ', None, True),
        ), throw=True):
            proc.sendline(response)
            match prompt:
                case autoproc.ExpectMatch(index=(0|1), before=before):
                    # Strip the per-page banner (clear-screen escapes plus one
                    # of three patient-header layouts) before keeping the page.
                    if left := re.match(r'(?:\x1b\[H\x1b\[J\x1b\[2J\x1b\[H\r\n\r\n.+?[ ]{2,}Report date: .+?\r\n Pat ID: \d{3}-\d{2}-\d{4}[ ]{2,}SEX: \w[ ]{2,}DOB: .+?[ ]{2,}LOC: .+?\r\n)|(?:\r\n\x1b\[H\x1b\[J\x1b\[2J\x1b\[H\r\n.+?[ ]{2,}\d{3}-\d{2}-\d{4}[ ]{2,}AGE: \d+[^\r\n]*?\r\n)|(?:\r[ ]+\r\r\n.+?[ ]{2,}\d{3}-\d{2}-\d{4}[ ]{2,}AGE: \d+[ ]{2,}.+?\r\n)', before):
                        pages.append(before[len(left.group(0)):])
                    elif re.match(r'(?:\r\n)+.+?[ ]{2,}\d{3}-\d{2}-\d{4}[ ]{2,}.+?[ ]+$', before) or re.match(r'^(?:\r\n)+$', before):
                        pass # header-only or blank page: nothing to keep
                    else:
                        print(repr(before))
                        assert False # unrecognized page layout — fail loudly
        # Back at the parent menu; drop any buffered output.
        assert await expect.endswith('\r\nSelect Patient Information and OE/RR Option: ', '\r\nSelect Patient Information and OE/RR <TEST ACCOUNT> Option: ')
        expect.clear()
        # Join pages, drop continuation banners, then split at each report
        # header ("Reporting Lab:") into individual reports.
        text = re.sub(r'\r\n\s+>> CONTINUATION OF .+? <<(?:(?:\r\n)|(?:\s+page \d+))', '', '\r\n'.join(pages))
        positions = [m.start() for m in re.finditer(r'(?:(?:[ ]+----MICROBIOLOGY----[ ]+page \d+\r\n\r\n)|(?:[ ]+))Reporting Lab:', text)]
        positions.append(len(text))
        for i in range(len(positions) - 1):
            yield parse(text[positions[i]:positions[i + 1]])
|
67
ext_measurement.py
Normal file
67
ext_measurement.py
Normal file
@ -0,0 +1,67 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import re
|
||||||
|
import datetime
|
||||||
|
import util
|
||||||
|
import autoproc
|
||||||
|
|
||||||
|
# Fallback units for vitals whose report row omits one (keyed by entry name).
units = {
    'P': 'bpm',
    'R': 'bpm',
    'B/P': 'mmHg',
    'Body Mass Index': 'kg/m²'
}


# Local timezone, resolved once at import, used to make parsed timestamps tz-aware.
local_tzinfo = datetime.datetime.now(datetime.timezone.utc).astimezone().tzinfo
|
||||||
|
async def cmd_entries(proc, mrn, alpha, omega):
    """Fetch measurements"""
    # Runs the "Vitals Cumulative Report" for *mrn* over [alpha, omega],
    # captures the paged output, and yields one dict per measurement entry
    # (plus synthetic SBP/DBP entries derived from each B/P reading).
    async with proc.sendline, autoproc.expect_async(proc) as expect:
        proc.sendline('^Results Reporting Menu')
        async for prompt, response in expect.promptmatches(
                (
                    (' Press return to continue ', None),
                    ('Select Patient(s): ', mrn, True),
                    ('Select Patient: ', mrn, True),
                ),
                ('Select Item(s): ', '7'), # Vitals Cumulative Report
                ('Start Date [Time]: T// ', util.vista_strftime(alpha)),
                (re.compile(r'^Ending Date \[Time\] \(inclusive\): (.*?)// $'), util.vista_strftime(omega)),
                ('DEVICE: HOME// ', 'HOME;;1023'),
                timeout_settle=2, throw=True):
            proc.sendline(response)
        assert await expect.earliest(' HOME(CRT)\r\n')
        pages = []
        # Page through; indexes 0/1 are pager prompts carrying a page of text.
        async for prompt, response in expect.promptmatches((
            ('Press return to continue "^" to escape ', None),
            ('Press RETURN to continue or \'^\' to exit: ', None),
            ('Select Clinician Menu Option: ', None, True),
        ), throw=True):
            proc.sendline(response)
            if prompt.index == 0 or prompt.index == 1:
                # Strip the page header (clear-screen + report title + rule,
                # optional "(continued)" line) and the trailing form footer.
                pages.append(re.sub(r'^\x1b\[H\x1b\[J\x1b\[2J\x1b\[H\r\n[^\r\n]+? Cumulative Vitals\/Measurements Report[ ]+Page \d+\r\n\r\n-{10,}\r\n(?:\d{2}\/\d{2}\/\d{2} \(continued\)\r\n\r\n)?|\r\n\r\n\*\*\*[^\r\n]+\r\n\r\n[^\r\n]+?VAF 10-7987j\r\nUnit:[^\r\n]+\r\nDivision:[^\r\n]+(?:\r\n)?$', '', prompt.before))
        assert await expect.endswith('\r\nSelect Patient Information and OE/RR Option: ', '\r\nSelect Patient Information and OE/RR <TEST ACCOUNT> Option: ')
        expect.clear()
        # The report is grouped by date line, then time line, then entries.
        for m_date in re.finditer(r'^(?P<date>\d{2}\/\d{2}\/\d{2})\r\n(?P<body>.*?\r\n)(?:(?=\d{2}\/)|\r\n|$)', '\r\n'.join(pages), re.DOTALL|re.MULTILINE):
            g_date = m_date.group('date')
            for m_time in re.finditer(r'^(?P<time>\d{2}:\d{2})\r\n(?P<body>.*?\r\n)(?:(?=\d{2}:)|\r\n|$)', m_date.group('body'), re.DOTALL|re.MULTILINE):
                dt = datetime.datetime.strptime(g_date + ' ' + m_time.group('time'), '%m/%d/%y %H:%M').replace(tzinfo=local_tzinfo)
                # Entry layout: name, american value (optional '*' flag and
                # unit), optional "(metric unit)" pair, optional "(method)",
                # then indented continuation lines forming a comment.
                for m_entry in re.finditer(r'^[ ]{4}\b(?P<name>[^:]+):[ ]+(?P<value_american>[\d\.\/%]+)(?P<flag>\*)?(?: (?P<unit_american>\w+))?(?: \((?P<value_metric>\d\S*) (?P<unit_metric>\S+)\))?(?: \((?P<method>[^\)\r\n]+)\))?(?P<comment>(?:\r\n[ ]{9}\S[^\r\n]*)*)', m_time.group('body'), re.DOTALL|re.MULTILINE):
                    m_entry = m_entry.groupdict()
                    m_entry['time'] = dt
                    if m_entry['value_american'].endswith('%'):
                        # '%' is glued onto the value; move it to the unit.
                        m_entry['value_american'] = m_entry['value_american'][:-1]
                        m_entry['unit_american'] = '%'
                    # Prefer the metric reading when present.
                    if m_entry['value_metric']:
                        m_entry['value'] = m_entry['value_metric']
                        m_entry['unit'] = m_entry['unit_metric']
                    else:
                        m_entry['value'] = m_entry['value_american']
                        m_entry['unit'] = m_entry['unit_american']
                    if m_entry['unit'] is None:
                        m_entry['unit'] = units.get(m_entry['name'])
                    m_entry['comment'] = re.sub(r'\s+', ' ', m_entry['comment'].strip()) if m_entry['comment'] else None
                    yield m_entry
                    if m_entry['name'] == 'B/P':
                        # Also emit systolic/diastolic as separate entries.
                        a, b = m_entry['value'] .split('/', 1)
                        yield dict(m_entry, name='SBP', value=a)
                        yield dict(m_entry, name='DBP', value=b)
|
63
ext_note.py
Normal file
63
ext_note.py
Normal file
@ -0,0 +1,63 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import re
|
||||||
|
import datetime
|
||||||
|
import util
|
||||||
|
import autoproc
|
||||||
|
|
||||||
|
import logging
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
def isnew(text):
    """Return truthy when *text* looks like the first page of a new progress note."""
    if text.startswith('\r\n\r\nNOTE DATED: '):
        return True
    # Otherwise check for a clear-screen banner immediately followed by the
    # "Progress Notes" header rule and a NOTE DATED line.
    return re.match(r'\x1b\[H\x1b\[J\x1b\[2J\x1b\[H\r(-{10,})\r\n.*?Progress Notes\r\n\1\r\nNOTE DATED: ', text)
|
||||||
|
|
||||||
|
def collapse(text):
    """Remove per-screen Progress Notes banners (and CONTINUED markers) from *text*, then strip surrounding whitespace."""
    banner = r'\x1b\[H\x1b\[J\x1b\[2J\x1b\[H\r(-{10,})\r\n.*?Progress Notes\r\n\1\r\n(.*?\*\* CONTINUED FROM PREVIOUS SCREEN \*\*\r\n)?'
    return re.sub(banner, '', text).strip()
|
||||||
|
|
||||||
|
# Local timezone, resolved once at import, used to make parsed timestamps tz-aware.
local_tzinfo = datetime.datetime.now(datetime.timezone.utc).astimezone().tzinfo


def parse(text):
    """Parse a progress-note header into a dict; the full text is kept under 'body'.

    'note_dated' becomes a tz-aware datetime (midnight when the header line
    carries no time component); the optional title/visit/admitted header
    lines map to their respective keys or None.
    """
    header = re.match(r'NOTE DATED: (?P<note_dated>.*?)\r\n(?:LOCAL TITLE: (?P<local_title>.*?)\r\n)?(?:STANDARD TITLE: (?P<standard_title>.*?)\r\n)?(?:VISIT: (?P<visit>.*?)\r\n)?(?:ADMITTED: (?P<admitted>.*?)\r\n)?', text)
    fields = header.groupdict()
    stamp = fields['note_dated']
    layout = '%m/%d/%Y %H:%M' if ':' in stamp else '%m/%d/%Y'
    fields['note_dated'] = datetime.datetime.strptime(stamp, layout).replace(tzinfo=local_tzinfo)
    fields['body'] = text
    return fields
|
||||||
|
|
||||||
|
async def cmd_reports(proc, mrn, alpha, omega):
    """Fetch progress notes"""
    # Runs the PNPT (print notes by patient) option for *mrn* over
    # [alpha, omega], accumulates pager pages, and yields one parsed note
    # dict (see parse()) each time a new-note banner is detected.
    async with proc.sendline, autoproc.expect_async(proc) as expect:
        proc.sendline('^PNPT')
        assert await expect.endswith('\r\nSelect PATIENT NAME: ')
        proc.sendline(mrn)
        assert await expect.endswith('\r\nPrint Notes Beginning: ')
        proc.sendline(util.vista_strftime(omega))
        assert await expect.endswith('\r\n Thru: ')
        proc.sendline(util.vista_strftime(alpha))
        assert await expect.endswith('\r\nDo you want WORK copies or CHART copies? CHART// ')
        proc.sendline() # default CHART
        # This prompt is optional — only answer it if it appears.
        if await expect.endswith('\r\nDo you want to start each note on a new page? NO// '):
            proc.sendline() # default NO
        assert await expect.endswith('\r\nDEVICE: HOME// ')
        proc.sendline('HOME;;1023')
        assert await expect.earliest(' HOME(CRT)\r\n')
        pages = []
        while True:
            # index 0 = pager prompt (more pages), index 1 = back at the
            # patient prompt (printout finished).
            match m_delimiter := await expect.endswith('\r\nType <Enter> to continue or \'^\' to exit: ', '\r\nSelect PATIENT NAME: '):
                case autoproc.ExpectMatch(index=0, before=before):
                    # A new-note banner flushes the accumulated previous note.
                    if isnew(before) and len(pages) > 0:
                        yield parse(collapse('\r\n'.join(pages)))
                        pages = []
                    pages.append(before)
                    proc.sendline()
                case autoproc.ExpectMatch(index=1, before=before):
                    if isnew(before) and len(pages) > 0:
                        yield parse(collapse('\r\n'.join(pages)))
                        pages = []
                    pages.append(before)
                    # Final page: flush whatever is accumulated and exit.
                    yield parse(collapse('\r\n'.join(pages)))
                    proc.sendline('^')
                    assert await expect.endswith('\r\nSelect Progress Notes Print Options Option: ')
                    break
                case _: assert False
        proc.sendline('^')
        # Return to the common parent menu so the session is reusable.
        proc.sendline('^Patient information AND OE/RR')
        assert await expect.endswith('\r\nSelect Patient Information and OE/RR Option: ', '\r\nSelect Patient Information and OE/RR <TEST ACCOUNT> Option: ')
        expect.clear()
|
85
ext_order.py
Normal file
85
ext_order.py
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import re
|
||||||
|
import datetime
|
||||||
|
import util
|
||||||
|
import autoproc
|
||||||
|
|
||||||
|
import logging
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Local timezone, resolved once at import, used to make parsed timestamps tz-aware.
local_tzinfo = datetime.datetime.now(datetime.timezone.utc).astimezone().tzinfo

def parse(raw, prev=None):
    """Parse one order block from the Order Summary report into a dict.

    raw: one fixed-width order block (two header lines plus detail lines,
    terminated by a blank line).
    prev: the previously parsed order; a field whose value is '"' (ditto)
    inherits that field's value from *prev*.
    """
    # Header layout (fixed width): line 1 = entered date, status, '+' flag,
    # first 40-char text column, requestor, optional start/stop dates;
    # line 2 = entered time, second text column, occupation, optional times.
    assert (m := re.match(r'\b(?P<date_entered>\d{2}/\d{2}/\d{2}) (?P<status>[ a-z+]{3})(?:(?P<plus>\+)|[ ])(?P<text0>[^\r\n]{40}) (?P<requestor_name>[^\r\n]{,10})(?: (?P<date_start>\d{2}/\d{2}/\d{2})|[ ]{9})?(?: (?P<date_stop>\d{2}/\d{2})|[ ]{7})?\r\n (?P<time_entered>\d{2}:\d{2}) (?P<text1>[^\r\n]{40}) (?P<requestor_occupation>[^\r\n]{,10})[ ]{4}(?:(?P<time_start>\d{2}:\d{2})|[ ]{5})?[ ]{2}(?:(?P<time_stop>\d{2}:\d{2})|[ ]{5})?\r\n(?P<text>.*?)\r\n\r\n', raw, re.DOTALL))
    data = { 'body': raw.strip() }
    # Normalize every captured field (strip; blank -> None), then resolve
    # '"' dittos against *prev*.
    data.update((k, prev[k] if v == '"' and prev else v) for k, v in ((k, (v.strip() or None) if v is not None else v) for k, v in m.groupdict().items()))
    if data.get('date_entered'):
        # '24:00' is VistA's "no time" placeholder — treat as date-only.
        if data.get('time_entered') and data['time_entered'] != '24:00':
            data['datetime_entered'] = datetime.datetime.strptime(data['date_entered'] + ' ' + data['time_entered'], '%m/%d/%y %H:%M').replace(tzinfo=local_tzinfo)
        else:
            data['datetime_entered'] = datetime.datetime.strptime(data['date_entered'], '%m/%d/%y').replace(tzinfo=local_tzinfo).date()
    if data.get('date_start'):
        if data.get('time_start') and data['time_start'] != '24:00':
            data['datetime_start'] = datetime.datetime.strptime(data['date_start'] + ' ' + data['time_start'], '%m/%d/%y %H:%M').replace(tzinfo=local_tzinfo)
        else:
            data['datetime_start'] = datetime.datetime.strptime(data['date_start'], '%m/%d/%y').replace(tzinfo=local_tzinfo).date()
    if data.get('date_stop'):
        # The stop date has no year; anchor it to the start (or now) and let
        # adjustyear() roll it forward past the reference.
        reference = data.get('datetime_start') or datetime.datetime.now()
        if isinstance(reference, datetime.date):
            # NOTE(review): datetime is a subclass of date, so this branch
            # also fires for datetimes — it appears intended to normalize the
            # reference to an aware midnight; confirm against callers.
            reference = datetime.datetime.combine(reference, datetime.time(0, 0), tzinfo=local_tzinfo)
        if data.get('time_stop') and data['time_stop'] != '24:00':
            data['datetime_stop'] = adjustyear(datetime.datetime.strptime(data['date_stop'] + ' ' + data['time_stop'], '%m/%d %H:%M').replace(tzinfo=local_tzinfo), reference)
        else:
            data['datetime_stop'] = adjustyear(datetime.datetime.strptime(data['date_stop'], '%m/%d').replace(tzinfo=local_tzinfo), reference).date()
    if text := m.groupdict().get('text'):
        # Detail lines end with a fixed-width Nrs/Chrt/Typ/Sgn footer row.
        assert (m := re.search(r'^[ ]{9}(?:Nrs:(?P<nrs>[^\r\n]{6})|[ ]{10})(?:Chrt:(?P<chrt>[^\r\n]{6})|[ ]{11})Typ:(?P<typ>[^\r\n]{20})Sgn:(?P<sgn>[^\r\n]+?)(?:\r\n|$)', text, re.MULTILINE))
        data.update((k, v.strip()) for k, v in m.groupdict().items() if v is not None)
        begin, end = m.span()
        # Order text = the two header text columns plus detail lines above
        # the footer, with soft line-wrap whitespace collapsed.
        data['text'] = ((data['text0'].strip() + '\r\n') if data.get('text0') else '') + ((data['text1'].strip() + '\r\n') if data.get('text1') else '') + re.sub(r'\r\n[ \t]+|[ \t]+\r\n', '\r\n', text[:begin].strip())
        if len(text) > end:
            data['footer'] = text[end:]
            text = text[:begin]
    else:
        data['text'] = ((data['text0'].strip() + '\r\n') if data.get('text0') else '') + (data['text1'].strip() if data.get('text1') else '')
    # The raw text columns are folded into 'text' above — drop them.
    del data['text0'], data['text1']
    return data
|
||||||
|
|
||||||
|
def adjustyear(dt, after):
    """Return *dt* with its year adjusted so the result falls after *after*.

    A *dt* already later than *after* is returned unchanged; otherwise its
    year is set to *after*'s year, bumped once more if still not later.
    """
    if dt > after:
        return dt
    candidate = dt.replace(year=after.year)
    if candidate > after:
        return candidate
    return candidate.replace(year=after.year + 1)
|
||||||
|
|
||||||
|
async def cmd_entries(proc, mrn, alpha, omega):
    """Fetch orders"""
    # Runs "Order Summary for Date/time Range" for *mrn* over
    # [alpha, omega], collects all pager pages, and yields one parsed order
    # dict (see parse()) per order block.
    async with proc.sendline, autoproc.expect_async(proc) as expect:
        proc.sendline('^Results Reporting Menu')
        if await expect.endswith('\r\n Press return to continue '): # No patients found
            proc.sendline()
        assert await expect.endswith('\r\nSelect Patient(s): ', '\r\nSelect Patient: ', timeout_settle=2)
        proc.sendline(mrn)
        assert await expect.endswith('\r\nSelect Item(s): ')
        proc.sendline('9') # Order Summary for Date/time Range
        assert await expect.endswith('\r\nStart Date [Time]: T// ')
        proc.sendline(util.vista_strftime(alpha))
        assert await expect.endswith(re.compile(r'\r\nEnding Date \[Time\] \(inclusive\): (.*?)// $'))
        proc.sendline(util.vista_strftime(omega))
        assert await expect.endswith('\r\nDEVICE: HOME// ')
        proc.sendline('HOME;90;1023')
        assert await expect.earliest(' HOME(CRT)\r\n')
        pages = []
        while True:
            # index 0 = pager prompt carrying a page, index 1 = done.
            match m_delimiter := await expect.endswith('\r\nPress RETURN to continue, \'^\' to exit: ', '\r\nSelect Clinician Menu Option: '):
                case autoproc.ExpectMatch(index=0, before=before):
                    pages.append(before)
                    proc.sendline()
                case autoproc.ExpectMatch(index=1):
                    break
                case _: assert False
        # Return to the common parent menu so the session is reusable.
        proc.sendline('^Patient information AND OE/RR')
        assert await expect.endswith('\r\nSelect Patient Information and OE/RR Option: ', '\r\nSelect Patient Information and OE/RR <TEST ACCOUNT> Option: ')
        expect.clear()
        prev = None
        # Orders may use '"' (ditto) to inherit a field from the previous
        # order, so each parsed order is fed into the next parse() call.
        # Bold escape sequences are stripped before matching order blocks.
        for m in re.finditer(r'\b\d{2}/\d{2}/\d{2}.*?\r\n\r\n', '\r\n'.join(pages).replace('\x1b[1m', '').replace('\x1b[m', ''), re.DOTALL):
            prev = parse(m.group(0), prev)
            yield prev
|
70
ext_patient.py
Normal file
70
ext_patient.py
Normal file
@ -0,0 +1,70 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import re
|
||||||
|
import autoproc
|
||||||
|
|
||||||
|
async def cmd_lookup_patient(proc, query):
    """Lookup patient"""
    # Runs "Patient Inquiry" for *query* and returns a list of candidate
    # patient dicts (name/dob/ssn/...). A direct single match is parsed from
    # the inquiry page; otherwise rows come from the numbered pick list.
    async with proc.sendline, autoproc.expect_async(proc) as expect:
        proc.sendline('^Patient Inquiry')
        assert await expect.endswith('\r\nSelect PATIENT NAME: ')
        proc.sendline(query)
        assert await expect.startswith(query)
        res = []
        single = False
        async for prompt, response in expect.promptmatches((
            (re.compile(r'^CHOOSE 1-\d+: $'), None),
            ('Type <Enter> to continue or \'^\' to exit: ', '^'),
            ('Do you wish to view active patient record flag details? Yes// ', 'No'),
            ('Do you want to continue processing this patient record? No// ', None),
            ('Select PATIENT NAME: ', None),
            ('Select Patient Appointments/Inpatient Inquiry Option: ', None, True),
        ), throw=True):
            proc.sendline(response)
            if prompt.index <= 4:
                # Keep page text; trim the trailing 24-char pager hint from
                # CHOOSE pages so it doesn't pollute the row parse below.
                res.append(prompt.before[:-24] if prompt.index == 0 and prompt.before.endswith('\r\nENTER \'^\' TO STOP, OR \r\n') else prompt.before)
            if 0 < prompt.index < 4:
                single = True # prompts 1-3 only appear for a direct single match
        assert await expect.endswith('\r\nSelect Patient Information and OE/RR Option: ', '\r\nSelect Patient Information and OE/RR <TEST ACCOUNT> Option: ')
        expect.clear()
        if single:
            # Single hit: parse the fixed-width demographics line from the
            # inquiry page (first '\r\n' removed to realign columns).
            return [re.search(r'[ ]{2}(?P<name>.+?)[ ]{2}(?:\((?P<alias>[^\)]*?)\))?[ ]{6}(?P<dob>\S+)[ ]{4}(?P<ssn>\S+(?:P \*\*Pseudo SSN\*\*)?)[ ]{5}(?P<yesno>\S+)[ ]{5}(?P<type>.+?)[ ]{6}(?P<no>[^\r\n]*)', res[0].replace('\r\n', '', 1)).groupdict()]
        # Pick list: first re-join rows hard-wrapped at 80 columns, then
        # parse each numbered fixed-width row.
        return [m.groupdict() for m in re.finditer(r'^[ ]{3}(?P<ordinal>\d+)[ ]{1,3}(?:(?P<last5>[A-Za-z][0-9]{4})[ ]{2})?(?P<name>.+?)[ ]{6,8}(?P<dob>\S+)[ ]{4}(?P<ssn>\S+(?:P \*\*Pseudo SSN\*\*)?)[ ]{5}(?P<yesno>\S+)[ ]{5}(?P<type>.+?)[ ]{6}(?P<no>[^\r\n]*)', re.sub(r'(.{80})\r\n(?! \d)', r'\1', '\r\n\r\n'.join(res)), re.MULTILINE)]
|
||||||
|
|
||||||
|
async def cmd_lookup_patient_ordinal(proc, query, ordinal, force=False):
    """Lookup patient"""
    # Like cmd_lookup_patient(), but selects entry *ordinal* from the pick
    # list and returns the patient-inquiry page text. *force* answers 'Yes'
    # to the "continue processing" guard (e.g. for restricted records).
    async with proc.sendline, autoproc.expect_async(proc) as expect:
        proc.sendline('^Patient Inquiry')
        assert await expect.endswith('\r\nSelect PATIENT NAME: ')
        proc.sendline(query)
        assert await expect.startswith(query)
        res = []
        ordinal = str(int(ordinal)) # normalize (also validates it is numeric)
        async for prompt, response in expect.promptmatches((
            (re.compile(r'^CHOOSE 1-\d+: $'), None),
            ('Type <Enter> to continue or \'^\' to exit: ', None),
            ('Do you wish to view active patient record flag details? Yes// ', 'No'),
            ('Do you want to continue processing this patient record? No// ', 'Yes' if force else 'No'),
            ('Select PATIENT NAME: ', None),
            ('Select Patient Appointments/Inpatient Inquiry Option: ', None, True),
        ), timeout_settle=2, throw=True):
            match prompt:
                case autoproc.ExpectMatch(index=0, before=before):
                    # Send the ordinal only on the page that actually lists it;
                    # otherwise just advance to the next page.
                    if re.search(r'^[ ]{3}' + ordinal + r'[ ]+', before, re.MULTILINE):
                        proc.sendline(ordinal)
                    else:
                        proc.sendline()
                case autoproc.ExpectMatch(index=(1|4), before=before):
                    # Collect page text, dropping a leading clear-screen
                    # escape sequence (13 characters) when present.
                    res.append(before[13:] if before.startswith('\x1b[H\x1b[J\x1b[2J\x1b[H') else before)
                    proc.sendline()
                case autoproc.ExpectMatch(index=2, before=before, match=match):
                    # Keep the prompt itself in the transcript, then answer it.
                    res.append(before + match)
                    proc.sendline(response)
                case autoproc.ExpectMatch(index=3, before=before, match=match):
                    res.append(before + match)
                    proc.sendline(response)
                case autoproc.ExpectMatch(index=5):
                    proc.sendline()
        assert await expect.endswith('\r\nSelect Patient Information and OE/RR Option: ', '\r\nSelect Patient Information and OE/RR <TEST ACCOUNT> Option: ')
        expect.clear()
        # Collapse the repeated per-page patient banner, keeping the '=' rule
        # that separates sections.
        return re.sub(r'\r\n\r\n(?:[^\r\n;]+);(?:\([^\)]*?\))? (?:\d+ )?(?:\d{3}-\d{2}-\d{4}P?) (?:[^\r\n]+?)[ ]*?(\r\n={10,}\r\n)\r\n', r'\1', '\r\n'.join(res))
|
71
ext_rcrs.py
Normal file
71
ext_rcrs.py
Normal file
@ -0,0 +1,71 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import re
|
||||||
|
import util
|
||||||
|
import autoproc
|
||||||
|
|
||||||
|
def parse_xml_rcrs(text, summary):
    """Yield one dict per <Patient> element of the RCRS extract.

    Each patient dict carries its <Tumor> children under 'tumors'; every
    tumor gets a 'meta' row looked up from *summary* by accession uid, whose
    'name'/'last5' are moved up onto the patient record.
    """
    lookup = {row['uid']: row for row in summary}
    for m_patient in re.finditer(r'<Patient>(.+?)</Patient>', text, re.DOTALL):
        body = m_patient.group(1)
        pos = body.find('<Tumor>')
        if pos < 0:
            record = parse_xml_items(body)
            record['tumors'] = []
        else:
            record = parse_xml_items(body[:pos])
            record['tumors'] = [parse_xml_items(t.group(1)) for t in re.finditer(r'<Tumor>(.+?)</Tumor>', body[pos:], re.DOTALL)]
            for tumor in record['tumors']:
                # Summary uids look like "NNNN-NNNN/SS" (accession split
                # 4+4 plus hospital sequence number).
                acc = tumor['accessionNumberHosp']
                meta = tumor['meta'] = lookup[acc[:4] + '-' + acc[4:] + '/' + tumor['sequenceNumberHospital']]
                record['name'] = meta.pop('name')
                record['last5'] = meta.pop('last5')
        yield record


def parse_xml_items(text):
    """Map naaccrId -> value for every <Item> element found in *text*."""
    found = {}
    for item in re.finditer(r'<Item naaccrId="([^"]+)">([^<]+)</Item>', text):
        found[item.group(1)] = item.group(2)
    return found
|
||||||
|
|
||||||
|
async def cmd_patients(proc, alpha, omega):
    """Fetch patients from RCRS dump"""
    # Runs "Create RCRS extract" for diagnosis dates in [alpha, omega],
    # captures the raw XML dump, then runs the registry summary listing and
    # yields parsed patient dicts (see parse_xml_rcrs()).
    async with proc.sendline, autoproc.expect_async(proc) as expect:
        proc.sendline('^Create RCRS extract')
        async for prompt, response in expect.prompts_any({
            re.compile(r'^ Facility Identification Number \(FIN\): \d+// $'): None,
            ' Select date field to be used for Start/End range: ': 'Date DX',
            re.compile(r'^ Start, Date DX: : \([^\)\r\n]+\): $'): util.vista_strftime(alpha),
            re.compile(r'^ End, Date DX: : \([^\)\r\n]+\): TODAY// $'): util.vista_strftime(omega),
            ' Are these settings correct? YES// ': None,
        }, throw=True):
            proc.sendline(response)
            if prompt.index == 4:
                break # settings confirmed — extract starts
        assert await expect.endswith('\r\n --------------------------------------------------------------\r\n\r\n\r\n')
        proc.sendline() # skip delay
        # The XML dump ends with a ^Z (SUB) terminator; keep what preceded it.
        assert (m := await expect.endswith('\r\n\x1a'))
        doc_rcrs = m.before
        assert await expect.endswith('\r\nDEVICE: ', timeout_settle=31)
        proc.sendline('HOME;80') # default HOME
        assert await expect.earliest('HOME(CRT)\r\n')
        summary = []
        # Page through the summary listing; indexes 0/1 carry listing rows,
        # index 4 means we are back at the registry menu.
        async for prompt, response in expect.prompts_any({
            '\x07': None,
            'Type <Enter> to continue or \'^\' to exit: ': None,
            ' Press \'RETURN\' to continue, \'^\' to stop: ': None,
            'Select *..Utility Options Option: ': None,
            'Select DHCP Tumor Registry Option: ': None,
        }, throw=True):
            proc.sendline(response)
            if prompt.index == 0 or prompt.index == 1:
                # Fixed-width summary row: last5, 30-char name, uid, primary
                # site, diagnosis date, last-changed date.
                summary.extend({k.strip(): v.strip() for k, v in row.groupdict().items()} for row in re.finditer(r'(?P<last5>[A-Z]\d{4}) (?P<name>[^\r\n]{30}) (?P<uid>[^ \r\n]+) (?P<primarySite>[^ \r\n]+) (?P<dateOfDiagnosis>\d{2}/\d{2}/\d{4}) (?P<dateCaseLastChanged>\d{2}/\d{2}/\d{4})', prompt.before))
            elif prompt.index == 4:
                break
        assert await expect.endswith('\r\nSelect Patient Information and OE/RR Option: ', '\r\nSelect Patient Information and OE/RR <TEST ACCOUNT> Option: ')
        expect.clear()
        # Join the XML dump with the summary metadata and emit patients.
        for item in parse_xml_rcrs(doc_rcrs, summary):
            yield item
|
||||||
|
|
||||||
|
async def cmd_tumors(proc, alpha, omega):
    """Yield individual tumors; each carries its (tumor-stripped) patient record under 'patient'."""
    async for record in cmd_patients(proc, alpha, omega):
        tumor_list = record.pop('tumors')
        for tumor in tumor_list:
            tumor['patient'] = record
            yield tumor
|
107
ext_scheduling.py
Normal file
107
ext_scheduling.py
Normal file
@ -0,0 +1,107 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
"""Appointments"""
|
||||||
|
|
||||||
|
import re
|
||||||
|
import datetime
|
||||||
|
import autoproc
|
||||||
|
|
||||||
|
import logging
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
local_tzinfo = datetime.datetime.now(datetime.timezone.utc).astimezone().tzinfo
|
||||||
|
async def cmd_appointments(proc, clinics='NPT-HEM/ONC ATTENDING', date='T', storeview=None):
    """List appointments"""
    # Runs the "Appointment List" report for the '^'-separated *clinics* on
    # *date* and yields one dict per appointment slot. When *storeview* is
    # given, rows already stored for the resolved date are deleted first.
    async with proc.sendline, autoproc.expect_async(proc) as expect:
        proc.sendline('^Appointment List')
        assert await expect.endswith('\r\nSelect division: ALL// ')
        proc.sendline() # default ALL
        assert await expect.endswith('\r\nCount, Non Count, or Both: C//')
        proc.sendline('Both')
        assert await expect.endswith('\r\nSelect clinic: ALL// ')
        for line in clinics.strip().split('^'):
            proc.sendline(line)
            # Replies: 0 = invalid clinic (bell + ??), 1/2 = accepted,
            # 3 = ambiguous (numbered choices), 4 = confirmation prompt.
            match await expect.endswith('\x07 ??\r\nSelect clinic: ALL// ', '\r\nSelect another clinic: ', '\r\nSelect clinic: ALL// ', re.compile(r'\r\nCHOOSE \d+-\d+: $'), '\r\n ...OK? Yes// '):
                case autoproc.ExpectMatch(index=0):
                    raise RuntimeError
                case autoproc.ExpectMatch(index=3):
                    proc.sendline('1') # choose first option
                case autoproc.ExpectMatch(index=4):
                    proc.sendline() # accept confirmation
        proc.sendline() # finish clinic selection with blank entry
        assert await expect.endswith('\r\nFor date: ')
        proc.sendline(date)
        if storeview:
            # Read back the date VistA echoes, e.g. "(JAN 02, 2020)", and
            # purge rows already stored for that day.
            assert (m := await expect.endswith(re.compile(r'\((.*?)\)', flags=re.MULTILINE|re.DOTALL)))
            date = datetime.datetime.strptime(m.group(1), '%b %d, %Y').date()
            storeview.delete('date_scheduled=?', (str(date),))
        assert await expect.endswith(re.compile(r'Include Primary Care assignment information in the output\? NO// ', flags=re.MULTILINE|re.DOTALL))
        proc.sendline() # default NO
        assert await expect.endswith('\r\nNumber of copies: 1// ')
        proc.sendline() # default 1
        assert await expect.endswith('\r\nDEVICE: HOME// ')
        proc.sendline() # default HOME
        assert await expect.earliest('Right Margin: 80// ')
        proc.sendline() # default 80 (maximum 256)
        async for clinic in vista_appointment_clinics(proc, expect):
            # Split a clinic page into (time, details) pairs: odd indexes are
            # the "HH:MM AM/PM" captures, even indexes the detail text.
            body = re.split(r'(?:^|\n)([ \d]\d:\d{2} [AP]M)\n\n', clinic['body'])
            for i in range(1, len(body), 2):
                item = clinic.copy()
                del item['body']
                item['time_scheduled'] = datetime.datetime.combine(item['date_scheduled'], datetime.datetime.strptime(body[i].strip(), '%I:%M %p').time()).replace(tzinfo=local_tzinfo)
                detail = re.sub(r'\r\n(\s{0,9}\S)', r'\1', body[i + 1]) # collapse hard wrap
                # First line: 10-char flag column + patient name + last4.
                name, detail = detail.split('\n', 1)
                item['flag'] = name[:10].strip()
                assert (match := re.search(r'^(?P<patient_name>\w.*?)\s+(?P<patient_last4>\d{4}).*?$', name[10:]))
                item.update(match.groupdict())
                # Optional inpatient location (ward/bed, column 41) lines.
                match = re.search(r'^\s{41}\b(?P<location_ward>.*?)\n\s{41}\b(?P<location_bed>.*?)$', detail, re.MULTILINE)
                if match:
                    item.update(match.groupdict())
                match = re.search(r'^\s{10}Phone #: (?P<patient_phone>.*)$', detail, re.MULTILINE)
                if match:
                    item.update(match.groupdict())
                # Comment lines are indented 15 columns.
                item['comment'] = '\r\n'.join(m.group(1) for m in re.finditer(r'^\s{15}(\w.*?)$', detail, re.MULTILINE))
                yield item
        # Return to the common parent menu so the session is reusable.
        proc.sendline('^Patient information AND OE/RR')
        assert await expect.endswith('\r\nSelect Patient Information and OE/RR Option: ', '\r\nSelect Patient Information and OE/RR <TEST ACCOUNT> Option: ')
        expect.clear()
|
||||||
|
|
||||||
|
async def vista_appointment_clinics(proc, expect):
    """List appointments by clinic, collecting all pages.

    Consumes the paged report output: each page is terminated by a BEL
    (\\x07) before the "Press return to continue" prompt. Pages of the same
    clinic/date report (print_page > 1) are concatenated into one item;
    each completed item is yielded when the next report starts or when the
    output ends.

    Yields dicts: time_printed (datetime), clinic, day_scheduled,
    date_scheduled (date), body (page text, '\\r\\n'-joined across pages).
    """
    item = None
    while True:
        m = await expect.earliest('\x07')
        if m:
            try:
                # Header and body are separated by a rule of 10+ underscores.
                head, body = re.split(r'\n_{10,}\n', m.before.replace('\r\n', '\n'))
            except ValueError:
                # Wrong number of sections -- not a report page; skip it.
                logger.warning('invalid page %r', m.before)  # lazy formatting
                continue
            if body.strip() not in {'No activity found for this clinic date!', 'Clinic cancelled for this date!'}:
                assert (m1 := re.search(r'^Date printed: (?P<time_printed>.*?)\s+Page: (?P<print_page>\d+)$', head, re.MULTILINE))
                assert (m2 := re.search(r'Appointments for (?P<clinic>.+) clinic on (?P<day_scheduled>[A-Z]+) (?P<date_scheduled>(?:[A-Z]+) (?:[0-9]+), (?:[0-9]+))', head))
                if int(m1.group('print_page')) > 1:
                    # next page of same report: header must agree with the open item
                    assert datetime.datetime.strptime(m1.group('time_printed'), '%b %d,%Y@%H:%M') == item['time_printed']
                    assert m2.group('clinic') == item['clinic']
                    assert m2.group('day_scheduled') == item['day_scheduled']
                    assert datetime.datetime.strptime(m2.group('date_scheduled'), '%b %d, %Y').date() == item['date_scheduled']
                    item['body'] += '\r\n' + body.rstrip('\r\n') # concatenate report bodies
                else:
                    # new report: flush the previous one, then start collecting
                    if item is not None:
                        yield item
                    item = {
                        'time_printed': datetime.datetime.strptime(m1.group('time_printed'), '%b %d,%Y@%H:%M'),
                        'clinic': m2.group('clinic'),
                        'day_scheduled': m2.group('day_scheduled'),
                        'date_scheduled': datetime.datetime.strptime(m2.group('date_scheduled'), '%b %d, %Y').date(),
                        'body': body.strip('\r\n')
                    }
                # NOTE: a redundant, unused `m1 = re.search(...)` that shadowed the
                # header match was removed here (its result was never read).
            assert await expect.endswith('\r\nPress return to continue or "^" to escape ')
            proc.sendline()
        else: # leftovers
            if item is not None:
                yield item
            break
40
ext_session.py
Normal file
40
ext_session.py
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
import re
|
||||||
|
import configparser
|
||||||
|
import autoproc
|
||||||
|
import XWBSSOi
|
||||||
|
|
||||||
|
async def task_smartcard(proc, config: Optional[configparser.ConfigParser]=None):
    """Smartcard authentication.

    Waits for the ACCESS CODE prompt, obtains an SSO token (preferring the
    certificate thumbprint cached in config['auth']['certificate'], else
    prompting via XWBSSOi), submits it, and on successful sign-on caches the
    certificate back into config, starts the keepalive task, and returns True.
    Returns None when the prompt never appears or sign-on is not confirmed.
    """
    async with proc.sendline, autoproc.expect_async(proc) as expect:
        if await expect.endswith('\r\nACCESS CODE: ', timeout_settle=60):
            try:
                certificate = config['auth']['certificate']
            except (TypeError, KeyError):  # TypeError: config is None; KeyError: not cached
                choice = XWBSSOi.get_vista_certificate()
                certificate = XWBSSOi.get_certificate_thumbprint(choice).hex() if choice else None
            doc = await XWBSSOi.get_sso_token_async(certificate=certificate)
            if doc:
                proc.sendline(re.sub(r'[\r\n]', '', doc))  # token must be a single line
                if await expect.earliest(re.compile(r'^\s+You last signed on\s+', flags=re.MULTILINE), timeout_settle=5):
                    # BUGFIX: previously `config.set` was called even when config
                    # was None (certificate freshly chosen via XWBSSOi), raising
                    # AttributeError. Cache only when a config object exists.
                    # NOTE(review): assumes the 'auth' section already exists --
                    # confirm; otherwise configparser raises NoSectionError.
                    if certificate and config is not None:
                        config.set('auth', 'certificate', certificate)
                    proc.create_task(task_keepalive(proc, True), name='@task:keepalive')
                    return True
async def task_keepalive(proc, suppress=False):
    """Keepalive"""
    with autoproc.expect(proc) as expect:
        continue_prompt = '\r\nDo you want to halt and continue with this option later? YES// '
        halt_prompt = '\r\nDo you really want to halt? YES// '
        # When suppressing, also intercept the explicit halt confirmation.
        prompts = (continue_prompt, halt_prompt) if suppress else (continue_prompt,)
        while True:
            matched = await expect.endswith(*prompts, timeout_settle=60)
            if not matched:
                continue
            # Always decline so the VistA session stays open.
            await proc.sendline.withlock('NO')
            if suppress and matched.index == 1:
                print('Suppressed; type \'^\' or \'^Halt\' to actually halt.')
10
frontend/.gitignore
vendored
Normal file
10
frontend/.gitignore
vendored
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
.DS_Store
|
||||||
|
node_modules
|
||||||
|
/build
|
||||||
|
/.svelte-kit
|
||||||
|
/package
|
||||||
|
.env
|
||||||
|
.env.*
|
||||||
|
!.env.example
|
||||||
|
vite.config.js.timestamp-*
|
||||||
|
vite.config.ts.timestamp-*
|
1
frontend/.npmrc
Normal file
1
frontend/.npmrc
Normal file
@ -0,0 +1 @@
|
|||||||
|
engine-strict=true
|
38
frontend/README.md
Normal file
38
frontend/README.md
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
# create-svelte
|
||||||
|
|
||||||
|
Everything you need to build a Svelte project, powered by [`create-svelte`](https://github.com/sveltejs/kit/tree/main/packages/create-svelte).
|
||||||
|
|
||||||
|
## Creating a project
|
||||||
|
|
||||||
|
If you're seeing this, you've probably already done this step. Congrats!
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# create a new project in the current directory
|
||||||
|
npm create svelte@latest
|
||||||
|
|
||||||
|
# create a new project in my-app
|
||||||
|
npm create svelte@latest my-app
|
||||||
|
```
|
||||||
|
|
||||||
|
## Developing
|
||||||
|
|
||||||
|
Once you've created a project and installed dependencies with `npm install` (or `pnpm install` or `yarn`), start a development server:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run dev
|
||||||
|
|
||||||
|
# or start the server and open the app in a new browser tab
|
||||||
|
npm run dev -- --open
|
||||||
|
```
|
||||||
|
|
||||||
|
## Building
|
||||||
|
|
||||||
|
To create a production version of your app:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run build
|
||||||
|
```
|
||||||
|
|
||||||
|
You can preview the production build with `npm run preview`.
|
||||||
|
|
||||||
|
> To deploy your app, you may need to install an [adapter](https://kit.svelte.dev/docs/adapters) for your target environment.
|
21
frontend/package.json
Normal file
21
frontend/package.json
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
{
|
||||||
|
"name": "vistassh-frontend",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"private": true,
|
||||||
|
"scripts": {
|
||||||
|
"dev": "vite dev",
|
||||||
|
"build": "vite build",
|
||||||
|
"preview": "vite preview"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@sveltejs/adapter-static": "^3.0.1",
|
||||||
|
"@sveltejs/kit": "^2.0.4",
|
||||||
|
"@sveltejs/vite-plugin-svelte": "^3.0.1",
|
||||||
|
"svelte": "^4.2.8",
|
||||||
|
"vite": "^5.0.10"
|
||||||
|
},
|
||||||
|
"type": "module",
|
||||||
|
"dependencies": {
|
||||||
|
"bootstrap": "^5.3.2"
|
||||||
|
}
|
||||||
|
}
|
12
frontend/src/app.html
Normal file
12
frontend/src/app.html
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<link rel="icon" href="%sveltekit.assets%/favicon.png" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||||
|
%sveltekit.head%
|
||||||
|
</head>
|
||||||
|
<body data-sveltekit-preload-data="tap">
|
||||||
|
<div style="display: contents">%sveltekit.body%</div>
|
||||||
|
</body>
|
||||||
|
</html>
|
2
frontend/src/routes/+layout.js
Normal file
2
frontend/src/routes/+layout.js
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
//export const prerender = true;
|
||||||
|
export const ssr = false;
|
30
frontend/src/routes/+layout.svelte
Normal file
30
frontend/src/routes/+layout.svelte
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
<script>
|
||||||
|
import { onDestroy } from 'svelte';
|
||||||
|
import 'bootstrap/dist/css/bootstrap.css';
|
||||||
|
import url_bootstrap from 'bootstrap/dist/js/bootstrap.bundle.js?url';
|
||||||
|
import { navlinks } from '$lib/stores.js';
|
||||||
|
import Navbar from '$lib/Navbar.svelte';
|
||||||
|
|
||||||
|
const links = navlinks.push({
|
||||||
|
name: 'VistA-SSH',
|
||||||
|
href: '/',
|
||||||
|
children: [
|
||||||
|
{ name: 'Lookup', href: '/lookup' },
|
||||||
|
{ name: 'Appointments', href: '/appointments' },
|
||||||
|
{ name: 'Clinics', href: '/clinics' },
|
||||||
|
{ name: 'RCRS', href: '/rcrs' },
|
||||||
|
{ name: 'Log', href: '/api/log.txt', target: '_blank' },
|
||||||
|
]
|
||||||
|
});
|
||||||
|
onDestroy(() => navlinks.pop(links));
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<svelte:head>
|
||||||
|
<script src={url_bootstrap}></script>
|
||||||
|
</svelte:head>
|
||||||
|
|
||||||
|
<Navbar />
|
||||||
|
|
||||||
|
<main class="container-md">
|
||||||
|
<slot />
|
||||||
|
</main>
|
15
frontend/src/routes/+page.svelte
Normal file
15
frontend/src/routes/+page.svelte
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
<svelte:head>
|
||||||
|
<title>VistA-SSH</title>
|
||||||
|
</svelte:head>
|
||||||
|
|
||||||
|
<div class="bg-body-tertiary p-5 rounded">
|
||||||
|
<h1 style="font-family: monospace; white-space: pre; text-align: center;">VVVV VVAAAA
|
||||||
|
VVVV VVAAAAAA
|
||||||
|
VVVV VVAA AAAA
|
||||||
|
VVVV VVAA AAAA
|
||||||
|
VVVV VVAA AAAA
|
||||||
|
VVVVVVAA AAAA
|
||||||
|
VVVVAA AAAAAAAAAAA
|
||||||
|
VVAA AAAAAAAAAAA</h1>
|
||||||
|
<h1 style="text-align: center;">🚧 VistA-SSH 🚧</h1>
|
||||||
|
</div>
|
11
frontend/src/routes/appointments/+page.js
Normal file
11
frontend/src/routes/appointments/+page.js
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
import { get_api_appointments } from '$lib/backend.js';
|
||||||
|
|
||||||
|
/** @type {import('./$types').PageLoad} */
|
||||||
|
/** @type {import('./$types').PageLoad} */
export async function load({ params, fetch }) {
	// User-configured clinic list drives which appointments are requested.
	const clinics = await (await fetch('/api/config/user/clinics')).json();
	const appointments = await get_api_appointments({ fetch, clinics, date: 'T' });
	// Chronological order; time_scheduled strings compare lexicographically.
	const by_time = (a, b) => (a.time_scheduled > b.time_scheduled) - (a.time_scheduled < b.time_scheduled);
	appointments.sort(by_time);
	return { clinics, appointments };
}
69
frontend/src/routes/appointments/+page.svelte
Normal file
69
frontend/src/routes/appointments/+page.svelte
Normal file
@ -0,0 +1,69 @@
|
|||||||
|
<script>
|
||||||
|
import { debounce, escapeHTML, escapeRegExp, strHashHSL, datetime_dtstr, filter_pattern, filter_test, filter_mark, filter_snippets } from '$lib/util.js';
|
||||||
|
|
||||||
|
export let data;
|
||||||
|
|
||||||
|
let query = '', pattern = null;
|
||||||
|
let debounced_pattern = debounce((/*query*/) => (pattern = query ? filter_pattern(escapeHTML(query)) : null), 200);
|
||||||
|
|
||||||
|
data.appointments.forEach(x => (delete x._content, x._content = escapeHTML(Object.values(x).join('\x00'))));
|
||||||
|
|
||||||
|
$: debounced_pattern(query); // argument `query` is for reactivity hinting only
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<svelte:head>
|
||||||
|
<title>Appointments</title>
|
||||||
|
</svelte:head>
|
||||||
|
|
||||||
|
<h1>Appointments</h1>
|
||||||
|
<div class="card mb-3 shadow">
|
||||||
|
{#if data.appointments.length > 0}
|
||||||
|
<table class="table appointments">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th colspan="100" style="padding: 0;">
|
||||||
|
<div class="input-group">
|
||||||
|
<input type="text" class="form-control" placeholder="Filter" name="q" bind:value={query} />
|
||||||
|
{#if query}<button type="button" class="btn btn-outline-secondary" on:click={() => query = ''}>❌</button>{/if}
|
||||||
|
</div>
|
||||||
|
</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
{#if pattern}
|
||||||
|
{#each data.appointments as row}
|
||||||
|
{#if filter_test(pattern, row._content)}
|
||||||
|
<tr style:--bs-table-bg={strHashHSL(row.clinic, '85%')}>
|
||||||
|
<td><div>{row.clinic}</div><div>{datetime_dtstr(new Date(row.time_scheduled))}</div></td>
|
||||||
|
<td><div><a href="/lookup?q={encodeURIComponent(row.patient_name.charAt(0) + row.patient_last4)}&name={encodeURIComponent(row.patient_name)}&rd=true">{row.patient_name} {row.patient_last4}</a></div><div>{row.patient_phone}</div></td>
|
||||||
|
<td class="comment">{@html filter_mark(pattern, escapeHTML(row.comment))}</td>
|
||||||
|
</tr>
|
||||||
|
{/if}
|
||||||
|
{/each}
|
||||||
|
{:else}
|
||||||
|
{#each data.appointments as row}
|
||||||
|
<tr style:--bs-table-bg={strHashHSL(row.clinic, '85%')}>
|
||||||
|
<td><div>{row.clinic}</div><div>{datetime_dtstr(new Date(row.time_scheduled))}</div></td>
|
||||||
|
<td><div><a href="/lookup?q={encodeURIComponent(row.patient_name.charAt(0) + row.patient_last4)}&name={encodeURIComponent(row.patient_name)}&rd=true">{row.patient_name} {row.patient_last4}</a></div><div>{row.patient_phone}</div></td>
|
||||||
|
<td class="comment">{row.comment}</td>
|
||||||
|
</tr>
|
||||||
|
{/each}
|
||||||
|
{/if}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<style>
|
||||||
|
:global(table.appointments mark) {
|
||||||
|
padding: 0;
|
||||||
|
font-weight: bold;
|
||||||
|
background-color: #fff;
|
||||||
|
}
|
||||||
|
.card table.table {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
td.comment {
|
||||||
|
white-space: pre-line;
|
||||||
|
}
|
||||||
|
</style>
|
14
frontend/src/routes/chart/[mrn]/+layout.js
Normal file
14
frontend/src/routes/chart/[mrn]/+layout.js
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
import { get_api_lookup } from '$lib/backend.js';
|
||||||
|
|
||||||
|
/** @type {import('./$types').LayoutLoad} */
|
||||||
|
/** @type {import('./$types').LayoutLoad} */
export async function load({ params, fetch }) {
	// Demographics banner line: "NAME;(ALIAS) [ICN ]SSN DOB"
	const RE_DEMOGRAPHICS = /^(?<name>[^\r\n;]+);(?:\((?<alias>[^\)]*?)\))? (?:(?<icn>\d+) )?(?<ssn>\d{3}-\d{2}-\d{4}P?) (?<dob>.+?)\s*$/m;
	const RE_BIRTH_SEX = /^Birth Sex[ ]+:[ ]+(.+?)$/m;
	const mrn = params.mrn;
	const facesheet = await get_api_lookup({ fetch, query: mrn, ordinal: '0', force: true });
	const demographics = RE_DEMOGRAPHICS.exec(facesheet);
	const facesheet_data = demographics ? demographics.groups : {};
	const sex = RE_BIRTH_SEX.exec(facesheet);
	if(sex) facesheet_data.sex = sex[1];
	const sensitive = facesheet.includes('***RESTRICTED RECORD***');
	return { mrn, sensitive, facesheet, facesheet_data };
}
19
frontend/src/routes/chart/[mrn]/+layout.svelte
Normal file
19
frontend/src/routes/chart/[mrn]/+layout.svelte
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
<script>
|
||||||
|
import { onDestroy } from 'svelte';
|
||||||
|
import { navlinks } from '$lib/stores.js';
|
||||||
|
|
||||||
|
export let data;
|
||||||
|
const links = navlinks.push({
|
||||||
|
name: data.facesheet_data.name ? data.facesheet_data.name.split(',')[0].toLowerCase().replace(/(?:^|\s|["'([{])+\S/g, m => m.toUpperCase()) + ' ' + data.facesheet_data.ssn.split('-')[2] : 'Chart',
|
||||||
|
href: '/chart/' + data.mrn,
|
||||||
|
children: [
|
||||||
|
{ name: 'Orders', href: '/chart/' + data.mrn + '/orders' },
|
||||||
|
{ name: 'Labs', href: '/chart/' + data.mrn + '/labs' },
|
||||||
|
{ name: 'Notes', href: '/chart/' + data.mrn + '/notes' },
|
||||||
|
{ name: 'Flowsheet', href: '/chart/' + data.mrn + '/flowsheet' },
|
||||||
|
]
|
||||||
|
});
|
||||||
|
onDestroy(() => navlinks.pop(links));
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<slot />
|
17
frontend/src/routes/chart/[mrn]/+page.svelte
Normal file
17
frontend/src/routes/chart/[mrn]/+page.svelte
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
<script>
|
||||||
|
import { page } from '$app/stores';
|
||||||
|
|
||||||
|
/** @type {import('./$types').PageData} */
|
||||||
|
export let data;
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<svelte:head>
|
||||||
|
<title>{data.facesheet_data.name}</title>
|
||||||
|
</svelte:head>
|
||||||
|
|
||||||
|
<h1>{data.facesheet_data.name}{#if data.sensitive} ⚠{/if}</h1>
|
||||||
|
<div class="card">
|
||||||
|
<div class="card-body">
|
||||||
|
<pre class="card-text">{data.facesheet}</pre>
|
||||||
|
</div>
|
||||||
|
</div>
|
14
frontend/src/routes/chart/[mrn]/flowsheet/+page.js
Normal file
14
frontend/src/routes/chart/[mrn]/flowsheet/+page.js
Normal file
@ -0,0 +1,14 @@
|
|||||||
|
import { get_api_measurements, get_api_labs } from '$lib/backend.js';
|
||||||
|
|
||||||
|
const time_min = new Date(1700, 0, 1);
|
||||||
|
|
||||||
|
/** @type {import('./$types').PageLoad} */
|
||||||
|
/** @type {import('./$types').PageLoad} */
export async function load({ params, fetch, parent }) {
	const mrn = params.mrn, offset = 30;
	const parentdata = await parent();
	// Initial window: last `offset` days through "now" in VistA date syntax.
	const range = { fetch, mrn, alpha: 'T-' + offset, omega: 'N' };
	const measurements = await get_api_measurements(range);
	const labs = await get_api_labs(range);
	return { mrn, offset, facesheet_data: parentdata.facesheet_data, measurements, labs };
}
483
frontend/src/routes/chart/[mrn]/flowsheet/+page.svelte
Normal file
483
frontend/src/routes/chart/[mrn]/flowsheet/+page.svelte
Normal file
@ -0,0 +1,483 @@
|
|||||||
|
<script>
|
||||||
|
import { tick } from 'svelte';
|
||||||
|
import { uniq, groupByArray, quantile_sorted, debounce, escapeHTML, escapeRegExp, strHashHSL, datetime_datestr, datetime_timestr, isInViewport, filter_pattern, filter_test, filter_mark, filter_snippets_lines, inherit } from '$lib/util.js';
|
||||||
|
import { get_api_measurements, get_api_labs } from '$lib/backend.js';
|
||||||
|
|
||||||
|
export let data;
|
||||||
|
|
||||||
|
let query = '', pattern = null, selection = null, component_items = decorate_measurements(data.measurements).concat(decorate_labs(data.labs));
|
||||||
|
let debounced_pattern = debounce((/*query*/) => (pattern = query ? filter_pattern(escapeHTML(query)) : null), 200);
|
||||||
|
|
||||||
|
$: debounced_pattern(query); // argument `query` is for reactivity hinting only
|
||||||
|
|
||||||
|
	// Prepare raw measurement rows for the flowsheet: precompute the filter
	// haystack (_content), parse the timestamp (_ts) and its display string.
	function decorate_measurements(xs) {
		return xs.map(x => (x = Object.assign({ _content: escapeHTML(Object.values(x).join('\x00')), _ts: new Date(x.time) }, x), x.timestr = datetime_timestr(x._ts), x));
	}
|
||||||
|
|
||||||
|
	// Prepare raw lab reports (keyed on time_collected) and explode each
	// report into its individual test components via components().
	function decorate_labs(xs) {
		return components(xs.map(x => Object.assign({ _content: escapeHTML(Object.values(x).join('\x00')), _ts: new Date(x.time_collected) }, x)));
	}
|
||||||
|
|
||||||
|
	// Parse individual lab tests out of fixed-width report bodies.
	// Each matched line yields one component object carrying the test name,
	// value, flag, unit, reference range (split into rangeL/rangeH when it is
	// "lo - hi"), display date/time, a back-reference to the source report,
	// and the report's own fields (minus body/_content).
	function components(reports) {
		// Fixed column widths: 27-char name, 8-char value, flag, 10-char unit,
		// 16-char range, "[site]" -- presumably the VistA lab print format; confirm.
		const res = [], re_lab_test = /^(?<name>\w[^\r\n]{26})(?<value>[^\r\n]{8}) (?:(?<flag>\w[ \*])|[ ]) (?<unit>[^\r\n]{10}) (?<range>[^\r\n]{16}) \[(?<site>\d+)\]$/gm;
		reports.forEach(function(x) {
			let datestr = datetime_datestr(x._ts), timestr = datetime_timestr(x._ts), m, r;
			while(m = re_lab_test.exec(x.body)) {
				m = m.groups;
				// Trim the fixed-width padding from every captured field.
				for(let k in m) if(m[k]) m[k] = m[k].replace(/^\s+|\s+$/g, '');
				if((r = m.range) && (r.includes(' - '))) {
					r = r.split(' - ');
					m.rangeL = r[0];
					m.rangeH = r[1];
				}
				m.datestr = datestr;
				m.timestr = timestr;
				m.report = x;
				// Copy report metadata onto the component, then drop the bulky fields.
				Object.assign(m, x);
				delete m.body;
				delete m._content;
				res.push(m);
			}
		});
		return res;
	}
|
||||||
|
|
||||||
|
	// Derive calculated values (Age, BMI, BSA, CrCl, ...) from raw items.
	// Items are grouped by timestamp into chronological snapshots; each
	// snapshot prototypally inherits (via inherit()) the latest known value
	// of every component, seeded from calc_constants (DOB, Sex). A
	// calculation in calc_functions fires when all of its deps are known and
	// at least one was updated in the current snapshot. Returns only the
	// newly derived result objects.
	function calculate(items) {
		var snapshots = [], results = [], history, update, item;
		groupByArray(items, x => x._ts).map(group => group.values.reduce(((acc, x) => (acc.values[x.name] = x, acc)), { key: group.key, values: {}})).sort((a, b) => (a.key > b.key) - (a.key < b.key)).forEach(function(group) {
			// `history` = cumulative view (inherits prior snapshot); `update` = this instant only.
			snapshots.push({ key: group.key, values: history = Object.assign(snapshots.length > 0 ? inherit(snapshots[snapshots.length - 1].values) : inherit(calc_constants), update = group.values) });
			history['Time'] = update['Time'] = { time: group.key, value: group.key };
			for(var i = 0; i < calc_functions.length; ++i) {
				var calculation = calc_functions[i], deps = calculation.deps;
				// Scan deps: all must exist in history (satisfied); any in update triggers recalc.
				for(var j = deps.length - 1, satisfied = true, updated = false; j >= 0; --j) if(!history[deps[j]]) { satisfied = false; break; }
				else if(update[deps[j]]) updated = true;
				if((satisfied) && (updated)) {
					// Last argument passes the previous value so calc can suppress no-change results.
					item = calculation.calc(...calculation.deps.map(x => history[x].value), history[calculation.name] && history[calculation.name].value);
					if((item !== undefined) && (item !== null) && (item === item) && (item != 'NaN')) { // item === item if not NaN
						results.push(history[calculation.name] = update[calculation.name] = Object.assign({ _ts: group.key, value: item }, calculation));
						if((calculation.hasOwnProperty('rangeL')) && (item < calculation.rangeL)) update[calculation.name].flag = 'L';
						else if((calculation.hasOwnProperty('rangeH')) && (item > calculation.rangeH)) update[calculation.name].flag = 'H';
					}
				}
			}
		});
		return results;
	}
|
||||||
|
const calc_constants = {
|
||||||
|
DOB: { _ts: null, value: new Date(data.facesheet_data.dob) },
|
||||||
|
Sex: { _ts: null, value: data.facesheet_data.sex }
|
||||||
|
};
|
||||||
|
const calc_functions = [
|
||||||
|
{ name: 'Age', unit: 'yr', deps: ['Time', 'DOB'], calc(Time, DOB, prev) { var x = Math.floor((Time - DOB.getTime())/3.15576e10); return x != prev ? x : undefined; } },
|
||||||
|
{ name: 'BMI', unit: 'kg/m²', rangeL: 18.5, rangeH: 24.9, range: '18.5 - 24.9', deps: ['Ht', 'Wt'], calc: (Ht, Wt) => (10000*Wt/(Ht*Ht)).toPrecision(3) },
|
||||||
|
{ name: 'BSA', unit: 'm²', deps: ['Ht', 'Wt'], calc: (Ht, Wt) => (0.007184*Math.pow(Ht, 0.725)*Math.pow(Wt, 0.425)).toPrecision(3) },
|
||||||
|
{ name: 'CrCl', unit: 'mL/min', deps: ['Age', 'Sex', 'Wt', 'CREATININE'], calc: (Age, Sex, Wt, CREATININE) => (((140 - Age) * Wt)/(72*CREATININE)*(Sex == 'MALE' ? 1 : 0.85)).toPrecision(4) },
|
||||||
|
{ name: 'RETICYLOCYTE#', unit: 'K/cmm', rangeL: 50, rangeH: 100, range: '50 - 100', deps: ['RBC', 'RETICULOCYTES'], calc: (RBC, RETICULOCYTES) => (10*RBC*RETICULOCYTES).toPrecision(3) }
|
||||||
|
];
|
||||||
|
|
||||||
|
$: component_calculated = component_items.concat(calculate(component_items));
|
||||||
|
$: component_groups = groupByArray(component_calculated, x => new Date(x._ts.getFullYear(), x._ts.getMonth(), x._ts.getDate())).map(function(group) {
|
||||||
|
group = group.values.reduce(((acc, x) => ((acc.values[x.name] || (acc.values[x.name] = [])).push(x), acc)), { key: group.key, datestr: datetime_datestr(group.key), datestr_year: group.key.getFullYear(), values: {}});
|
||||||
|
for(var k in group.values) if(group.values.hasOwnProperty(k)) {
|
||||||
|
var items = group.values[k].sort((a, b) => a._ts - b._ts);
|
||||||
|
var strings = items.map(item => item.timestr + ' • ' + item.value + (item.unit ? ' ' + item.unit : '') + (item.flag ? ' [' + item.flag + ']' : '')/* + (item.comment && item.comment.indexOf('\n') < 0 ? ' • ' + item.comment : '')*/);
|
||||||
|
var flags = uniq(items.map(item => item.flag).filter(x => x).map(x => x.charAt(0)));
|
||||||
|
//var comments = uniq(items.map(item => item.comment).filter(x => x && x.indexOf('\n') >= 0));
|
||||||
|
var numbers = uniq(items.map(item => item.value).filter(x => isNumeric(x)));
|
||||||
|
var min = Math.min.apply(null, numbers);
|
||||||
|
var max = Math.max.apply(null, numbers);
|
||||||
|
group.values[k] = {
|
||||||
|
time: group.key,
|
||||||
|
name: k,
|
||||||
|
unit: items[0].unit,
|
||||||
|
range: items[0].range,
|
||||||
|
value: numbers.length > 1 ? min + ' - ' + max : numbers.length == 1 ? numbers[0] : items.length == 1 ? items[0].value : 'MULTIPLE', min: min, max: max,
|
||||||
|
flag: flags.length > 1 ? '*' : flags.length == 1 ? flags[0] : null,
|
||||||
|
reports: items.map(item => item.report).filter(x => x),
|
||||||
|
comment: (strings.join('\n') /*+ '\n\n' + comments.join('\n\n')*/).replace(/^\s+|\s+$/g, '')
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return group;
|
||||||
|
}).sort((a, b) => (a.key > b.key) - (a.key < b.key));
|
||||||
|
$: component_names = uniq(component_calculated.map(x => x.name));
|
||||||
|
$: component_stats = statistics(component_calculated);
|
||||||
|
|
||||||
|
function isNumeric(n) {
|
||||||
|
return !isNaN(parseFloat(n)) && isFinite(n);
|
||||||
|
}
|
||||||
|
	// Per-test summary statistics over every numeric result in the resultset:
	// returns { name: { n, q25, q50, q75, range } }, where range is a display
	// string ("IQR: lo - hi", or "Median: m" when the quartiles coincide).
	function statistics(resultset) {
		var res = {}, group, item;
		// Bucket numeric values by test name.
		for(var i = resultset.length - 1; i >= 0; --i) {
			item = resultset[i];
			if(isNumeric(item.value)) {
				if(res[item.name]) res[item.name].push(+item.value);
				else res[item.name] = [+item.value];
			}
		}
		for(var k in res) if(res.hasOwnProperty(k)) {
			item = res[k].sort((a, b) => a - b); // quantile_sorted requires ascending order
			item = res[k] = {
				n: item.length,
				q25: quantile_sorted(item, 0.25),
				q50: quantile_sorted(item, 0.50),
				q75: quantile_sorted(item, 0.75)
			}
			item.range = item.q25 != item.q75 ? ('IQR: ' + item.q25 + ' - ' + item.q75) : ('Median: ' + item.q50);
		}
		return res;
	}
|
||||||
|
	// Hover text for one flowsheet cell: the lab's reference range (with its
	// flag), this patient's own IQR/median (with an IQR-relative [L]/[H]),
	// then any per-result comment, separated by a blank line.
	function tooltip(item) {
		if(item) {
			var res = [], stat;
			if(item.range) res.push('Ref: ' + item.range + ' ' + item.unit + (item.flag ? ' [' + item.flag + ']' : ''));
			if(stat = component_stats[item.name]) res.push(stat.range + (item.range ? ' ' + item.unit : '') + (isNaN(parseFloat(item.value)) ? '' : item.value < stat.q25 ? ' [L]' : item.value > stat.q75 ? ' [H]' : ''));
			if(item.comment) {
				if(res.length > 0) res.push(''); // blank separator line
				res.push(item.comment);
			}
			return res.join('\n');
		}
	};
|
||||||
|
	// Abnormality predicates. The "ref" variants use the lab's own
	// reference-range flag; the "iqr" variants compare against this patient's
	// interquartile range (component_stats), requiring n > 2 observations.
	// Daily-grouped rows carry min/max; single results carry only value.
	function abnormal_ref(item) {
		// Truthy when the lab reported any flag (e.g. 'L', 'H', 'H*').
		return (item) && (item.flag);
	};
	function abnormal_ref_low(item) {
		return (item) && (item.flag) && (item.flag.indexOf('L') >= 0);
	};
	function abnormal_ref_high(item) {
		return (item) && (item.flag) && (item.flag.indexOf('H') >= 0);
	};
	function abnormal_iqr(item) {
		var stat;
		if((item) && (stat = component_stats[item.name]) && (stat.n > 2)) {
			if((item.hasOwnProperty('min')) && (item.hasOwnProperty('max'))) return (item.min < stat.q25) || (item.max > stat.q75);
			else if(isNumeric(item.value)) return (item.value < stat.q25) || (item.value > stat.q75);
		}
	};
	function abnormal_iqr_low(item) {
		var stat;
		if((item) && (stat = component_stats[item.name]) && (stat.n > 2)) {
			if((item.hasOwnProperty('min')) && (item.hasOwnProperty('max'))) return item.min < stat.q25;
			else if(isNumeric(item.value)) return item.value < stat.q25;
		}
	};
	function abnormal_iqr_high(item) {
		var stat;
		if((item) && (stat = component_stats[item.name]) && (stat.n > 2)) {
			if((item.hasOwnProperty('min')) && (item.hasOwnProperty('max'))) return item.max > stat.q75;
			else if(isNumeric(item.value)) return item.value > stat.q75;
		}
	};
|
||||||
|
|
||||||
|
function abstract(body) {
|
||||||
|
let re = /^(\w.*?)[ \t]{2,}\S.*?[ \t]{2,}/gm, m, res = [];
|
||||||
|
while(m = re.exec(body)) res.push(m[1]);
|
||||||
|
re = /^\*[ ]+((?:.+?) REPORT) =>/gm;
|
||||||
|
while(m = re.exec(body)) res.push(m[1]);
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function loadmore(evt, factor = 1.5) {
|
||||||
|
if(loadmore.loading) return;
|
||||||
|
loadmore.loading = true;
|
||||||
|
try {
|
||||||
|
const next_offset = data.offset + (loadmore.limit = (factor*loadmore.limit)|0);
|
||||||
|
const measurements = await get_api_measurements({ mrn: data.mrn, omega: 'T-' + (data.offset + 1), alpha: 'T-' + next_offset });
|
||||||
|
Array.prototype.push.apply(component_items, decorate_measurements(measurements));
|
||||||
|
const labs = await get_api_labs({ mrn: data.mrn, omega: 'T-' + (data.offset + 1), alpha: 'T-' + next_offset });
|
||||||
|
Array.prototype.push.apply(component_items, decorate_labs(labs));
|
||||||
|
component_items = component_items; // reactivity hint
|
||||||
|
data.offset = next_offset;
|
||||||
|
} finally {
|
||||||
|
loadmore.loading = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
loadmore.loading = false;
|
||||||
|
loadmore.limit = 30;
|
||||||
|
|
||||||
|
let header;
|
||||||
|
(async function loadinit(target = 16, requests = 4) {
|
||||||
|
await tick();
|
||||||
|
for(let i = 0; (i < requests) && (component_groups.length < target); ++i) await loadmore();
|
||||||
|
await tick();
|
||||||
|
if((header) && (component_groups)) header.children[header.children.length - 1].scrollIntoView({ block: 'nearest', inline: 'end' });
|
||||||
|
})();
|
||||||
|
|
||||||
|
/*
|
||||||
|
const observer = new IntersectionObserver((entries) => { if((!query) && (entries[0].isIntersecting)) loadmore(null); }, { root: null, rootMargin: '0px', threshold: 0.5 });
|
||||||
|
let bottom = null;
|
||||||
|
$: {
|
||||||
|
observer.disconnect();
|
||||||
|
if(bottom) observer.observe(bottom);
|
||||||
|
}
|
||||||
|
|
||||||
|
let reportlist;
|
||||||
|
async function scroll(selection) {
|
||||||
|
if(selection) {
|
||||||
|
await tick();
|
||||||
|
const el = reportlist.querySelector('.active');
|
||||||
|
if((el) && (!isInViewport(el, true))) el.scrollIntoView({ block: 'center' });
|
||||||
|
} else {
|
||||||
|
const items = reportlist.children;
|
||||||
|
for(let i = 0, el; i < items.length; ++i) if(isInViewport(el = items[i])) {
|
||||||
|
await tick();
|
||||||
|
el.scrollIntoView({ block: 'start' });
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
$: if(reportlist) scroll(selection);
|
||||||
|
*/
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<svelte:head>
|
||||||
|
<title>Flowsheet</title>
|
||||||
|
</svelte:head>
|
||||||
|
|
||||||
|
{#if (selection) && (selection.length > 0)}
|
||||||
|
<div class="halfpane rightpane">
|
||||||
|
{#each selection as row, idx}
|
||||||
|
<nav class="navbar bg-body-secondary">
|
||||||
|
<div class="container-fluid">
|
||||||
|
<span class="navbar-brand">{datetime_datestr(row._ts)}@{datetime_timestr(row._ts)} {row.accession} {row.specimen}</span>
|
||||||
|
{#if idx == 0}<button type="button" class="btn btn-outline-light" on:click={() => selection = null}>❌</button>{/if}
|
||||||
|
</div>
|
||||||
|
</nav>
|
||||||
|
<div class="container-fluid report">{@html pattern ? filter_mark(pattern, escapeHTML(row.body)) : escapeHTML(row.body)}</div>
|
||||||
|
{/each}
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
<div class={(selection) && (selection.length > 0) ? 'halfpane leftpane' : ''}>
|
||||||
|
<div style="font-family: monospace;" role="region" tabindex="0">
|
||||||
|
<table class="table-sticky table-data">
|
||||||
|
<thead>
|
||||||
|
<tr bind:this={header}>
|
||||||
|
<th class="corner">
|
||||||
|
<input type="text" class="form-control" placeholder="Filter..." bind:value={query}>
|
||||||
|
{#if !loadmore.loading}<span class="badge rounded-pill bg-primary position-absolute top-0 start-100 translate-middle-x" style="cursor: pointer;" on:click={loadmore}>More</span>{/if}
|
||||||
|
</th>
|
||||||
|
{#each component_groups as group, idx}<th class="date" class:first={ (idx == 0) || (group.key != component_groups[idx - 1].key) }><div class="year">{group.datestr.substring(0, 4)}</div><div class="monthdate">{group.datestr.substring(5)}</div>{#if false}<div class="hourminute daily">{datetime_timestr(group.values[0]._ts)}</div>{/if}</th>{/each}
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
{#if pattern}
|
||||||
|
<tbody>
|
||||||
|
{#each component_names as name}
|
||||||
|
{#if filter_test(pattern, name)}
|
||||||
|
<tr class="match">
|
||||||
|
<th>{@html pattern ? filter_mark(pattern, escapeHTML(name)) : escapeHTML(name)}</th>
|
||||||
|
{#each component_groups as group, idx}
|
||||||
|
<td class:first={ (idx == 0) || (group.key != component_groups[idx - 1].key) } class:abnormal-ref={abnormal_ref(group.values[name])} class:abnormal-ref-low={abnormal_ref_low(group.values[name])} class:abnormal-ref-high={abnormal_ref_high(group.values[name])} class:abnormal-iqr={abnormal_iqr(group.values[name])} class:abnormal-iqr-low={abnormal_iqr_low(group.values[name])} class:abnormal-iqr-high={abnormal_iqr_high(group.values[name])} title={tooltip(group.values[name])} on:click={() => selection = (group.values[name]) && (group.values[name].reports)}>{group.values[name] ? group.values[name].value : ''}</td>
|
||||||
|
{/each}
|
||||||
|
</tr>
|
||||||
|
{/if}
|
||||||
|
{/each}
|
||||||
|
</tbody>
|
||||||
|
<tbody>
|
||||||
|
{#each component_names as name}
|
||||||
|
{#if !filter_test(pattern, name)}
|
||||||
|
<tr>
|
||||||
|
<th>{name}</th>
|
||||||
|
{#each component_groups as group, idx}
|
||||||
|
<td class:first={ (idx == 0) || (group.key != component_groups[idx - 1].key) } class:abnormal-ref={abnormal_ref(group.values[name])} class:abnormal-ref-low={abnormal_ref_low(group.values[name])} class:abnormal-ref-high={abnormal_ref_high(group.values[name])} class:abnormal-iqr={abnormal_iqr(group.values[name])} class:abnormal-iqr-low={abnormal_iqr_low(group.values[name])} class:abnormal-iqr-high={abnormal_iqr_high(group.values[name])} title={tooltip(group.values[name])} on:click={() => selection = (group.values[name]) && (group.values[name].reports)}>{group.values[name] ? group.values[name].value : ''}</td>
|
||||||
|
{/each}
|
||||||
|
</tr>
|
||||||
|
{/if}
|
||||||
|
{/each}
|
||||||
|
</tbody>
|
||||||
|
{:else}
|
||||||
|
<tbody>
|
||||||
|
{#each component_names as name}
|
||||||
|
<tr>
|
||||||
|
<th>{name}</th>
|
||||||
|
{#each component_groups as group, idx}
|
||||||
|
<td class:first={ (idx == 0) || (group.key != component_groups[idx - 1].key) } class:abnormal-ref={abnormal_ref(group.values[name])} class:abnormal-ref-low={abnormal_ref_low(group.values[name])} class:abnormal-ref-high={abnormal_ref_high(group.values[name])} class:abnormal-iqr={abnormal_iqr(group.values[name])} class:abnormal-iqr-low={abnormal_iqr_low(group.values[name])} class:abnormal-iqr-high={abnormal_iqr_high(group.values[name])} title={tooltip(group.values[name])} on:click={() => selection = (group.values[name]) && (group.values[name].reports)}>{group.values[name] ? group.values[name].value : ''}</td>
|
||||||
|
{/each}
|
||||||
|
</tr>
|
||||||
|
{/each}
|
||||||
|
</tbody>
|
||||||
|
{/if}
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<style>
|
||||||
|
:global(th mark) {
|
||||||
|
padding: 0;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
@media (prefers-reduced-motion: no-preference) {
|
||||||
|
:root {
|
||||||
|
scroll-behavior: auto;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
[role="region"][tabindex] {
|
||||||
|
max-height: calc(100vh - 4.5rem);
|
||||||
|
overflow: auto;
|
||||||
|
}
|
||||||
|
table {
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
table tr.match th:first-child {
|
||||||
|
background-color: #c6def1;
|
||||||
|
}
|
||||||
|
table th, table td {
|
||||||
|
padding: 0 0.5rem;
|
||||||
|
white-space: nowrap;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
table.table-sticky thead th {
|
||||||
|
position: sticky;
|
||||||
|
z-index: 1010;
|
||||||
|
top: 0;
|
||||||
|
background-color: #fff;
|
||||||
|
}
|
||||||
|
table.table-sticky th:first-child {
|
||||||
|
position: sticky;
|
||||||
|
z-index: 1010;
|
||||||
|
left: 0;
|
||||||
|
background-color: #fff;
|
||||||
|
}
|
||||||
|
table.table-sticky thead th.corner {
|
||||||
|
padding: 0;
|
||||||
|
z-index: 1020;
|
||||||
|
}
|
||||||
|
.navbar {
|
||||||
|
position: sticky;
|
||||||
|
z-index: 1020;
|
||||||
|
top: 3.5rem;
|
||||||
|
}
|
||||||
|
.leftpane {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
li.active {
|
||||||
|
scroll-margin-top: 3.5rem;
|
||||||
|
}
|
||||||
|
div.singleline {
|
||||||
|
white-space: nowrap;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
}
|
||||||
|
.abstract {
|
||||||
|
font-size: 0.8em;
|
||||||
|
}
|
||||||
|
.snippets {
|
||||||
|
font-family: monospace;
|
||||||
|
}
|
||||||
|
.report {
|
||||||
|
font-family: monospace;
|
||||||
|
white-space: pre-wrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.table-sticky {
|
||||||
|
border: 2px solid #dee2e6;
|
||||||
|
}
|
||||||
|
table.table-sticky th:first-child {
|
||||||
|
border-right: 2px solid #dee2e6;
|
||||||
|
}
|
||||||
|
table.table-sticky tbody {
|
||||||
|
border-top: 2px solid #dee2e6;
|
||||||
|
}
|
||||||
|
table.table-sticky tbody tr {
|
||||||
|
border-top: 1px dashed #dee2e6;
|
||||||
|
}
|
||||||
|
table.table-sticky tbody tr:hover {
|
||||||
|
border: 1px solid #6c757d;
|
||||||
|
}
|
||||||
|
td:nth-of-type(odd) {
|
||||||
|
background-color: rgba(0, 0, 0, 0.05);
|
||||||
|
}
|
||||||
|
table.table-sticky tbody th, table.table-sticky th.name {
|
||||||
|
cursor: default;
|
||||||
|
font-weight: bold;
|
||||||
|
text-align: center;
|
||||||
|
padding-left: 1rem;
|
||||||
|
padding-right: 1rem;
|
||||||
|
}
|
||||||
|
table.table-sticky th.filtered {
|
||||||
|
background-color: #6c757d;
|
||||||
|
color: #fff;
|
||||||
|
}
|
||||||
|
table.table-sticky th.date {
|
||||||
|
cursor: default;
|
||||||
|
font-size: 80%;
|
||||||
|
font-weight: normal;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
table.table-sticky th.date .monthdate {
|
||||||
|
font-size: 125%;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
table.table-sticky th.date .hourminute.daily {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
table.table-sticky tbody td {
|
||||||
|
padding: 0 0.5rem;
|
||||||
|
max-width: 12rem;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
}
|
||||||
|
.first {
|
||||||
|
border-left: 1px solid #dee2e6;
|
||||||
|
}
|
||||||
|
.year, .monthdate {
|
||||||
|
visibility: hidden;
|
||||||
|
}
|
||||||
|
.first .year, .first .monthdate {
|
||||||
|
visibility: visible;
|
||||||
|
}
|
||||||
|
|
||||||
|
table.table-data .abnormal-ref::after {
|
||||||
|
content: ' *';
|
||||||
|
}
|
||||||
|
table.table-data .abnormal-ref-low::after {
|
||||||
|
content: ' L';
|
||||||
|
}
|
||||||
|
table.table-data .abnormal-ref-high::after {
|
||||||
|
content: ' H';
|
||||||
|
}
|
||||||
|
table.table-data .abnormal-iqr, table.table-data .abnormal-iqr-low.abnormal-iqr-high {
|
||||||
|
color: #f39a27;
|
||||||
|
}
|
||||||
|
table.table-data .abnormal-iqr-low {
|
||||||
|
color: #976ed7;
|
||||||
|
}
|
||||||
|
table.table-data .abnormal-iqr-high {
|
||||||
|
color: #c23b23;
|
||||||
|
}
|
||||||
|
table.table-data .abnormal-ref, table.table-data .abnormal-iqr {
|
||||||
|
background-color: #fbffde;
|
||||||
|
}
|
||||||
|
table.table-data .abnormal-ref-low.abnormal-iqr-low, table.table-data .abnormal-ref-high.abnormal-iqr-high {
|
||||||
|
font-weight: bold;
|
||||||
|
background-color: #ffd1d1;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media screen and (min-width: 720px) {
|
||||||
|
.halfpane {
|
||||||
|
position: absolute;
|
||||||
|
top: 3.5rem;
|
||||||
|
bottom: 0;
|
||||||
|
width: 50%;
|
||||||
|
overflow: auto;
|
||||||
|
}
|
||||||
|
.leftpane {
|
||||||
|
display: block;
|
||||||
|
left: 0;
|
||||||
|
z-index: -1;
|
||||||
|
}
|
||||||
|
.leftpane [role="region"][tabindex] {
|
||||||
|
max-height: calc(100vh - 3.5rem);
|
||||||
|
direction: rtl;
|
||||||
|
}
|
||||||
|
.leftpane [role="region"][tabindex] > * {
|
||||||
|
direction: ltr;
|
||||||
|
}
|
||||||
|
.rightpane {
|
||||||
|
right: 0;
|
||||||
|
box-shadow: var(--bs-box-shadow);
|
||||||
|
}
|
||||||
|
.halfpane .navbar {
|
||||||
|
top: 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</style>
|
10
frontend/src/routes/chart/[mrn]/labs/+page.js
Normal file
10
frontend/src/routes/chart/[mrn]/labs/+page.js
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
import { get_api_labs } from '$lib/backend.js';
|
||||||
|
|
||||||
|
/** @type {import('./$types').PageLoad} */
// Initial page load: fetch the most recent `offset` days of lab reports
// for the patient identified by the route's mrn parameter.
export async function load({ params, fetch }) {
    const mrn = params.mrn;
    const offset = 30;
    const reports = await get_api_labs({ fetch, mrn, alpha: 'T-' + offset, omega: 'N' });
    return { mrn, reports, offset };
}
|
183
frontend/src/routes/chart/[mrn]/labs/+page.svelte
Normal file
183
frontend/src/routes/chart/[mrn]/labs/+page.svelte
Normal file
@ -0,0 +1,183 @@
|
|||||||
|
<script>
|
||||||
|
import { tick } from 'svelte';
|
||||||
|
import { debounce, escapeHTML, escapeRegExp, strHashHSL, datetime_datestr, datetime_timestr, isInViewport, filter_pattern, filter_test, filter_mark, filter_snippets_lines } from '$lib/util.js';
|
||||||
|
import { get_api_labs } from '$lib/backend.js';
|
||||||
|
|
||||||
|
export let data;
|
||||||
|
|
||||||
|
let query = '', pattern = null, selection = null, all_reports = decorate(data.reports);
|
||||||
|
let debounced_pattern = debounce((/*query*/) => (pattern = query ? filter_pattern(escapeHTML(query)) : null), 200);
|
||||||
|
|
||||||
|
$: debounced_pattern(query); // argument `query` is for reactivity hinting only
|
||||||
|
|
||||||
|
// Add derived fields to each raw report: `_content` (an escaped, NUL-joined
// blob of all field values, used for filtering) and `_ts` (the collection
// timestamp as a Date).
function decorate(xs) {
    return xs.map(x => {
        const _content = escapeHTML(Object.values(x).join('\x00'));
        const _ts = new Date(x.time_collected);
        return Object.assign({ _content, _ts }, x);
    });
}
|
||||||
|
|
||||||
|
// Pull a short summary out of a lab report body: tabular row labels (a word
// at line start followed by two runs of 2+ spaces/tabs) plus any embedded
// "* ... REPORT =>" section titles, in that order.
function abstract(body) {
    const results = [];
    const row_label = /^(\w.*?)[ \t]{2,}\S.*?[ \t]{2,}/gm;
    for(let m; (m = row_label.exec(body)) !== null; ) results.push(m[1]);
    const report_title = /^\*[ ]+((?:.+?) REPORT) =>/gm;
    for(let m; (m = report_title.exec(body)) !== null; ) results.push(m[1]);
    return results;
}
|
||||||
|
|
||||||
|
// Fetch the next (older) window of lab reports and append them to the list.
// The window size grows by `factor` on each call; a loading flag prevents
// overlapping requests.
async function loadmore(evt, factor = 1.5) {
    if(loadmore.loading) return;   // re-entrancy guard
    loadmore.loading = true;
    try {
        const omega = 'T-' + (data.offset + 1);
        loadmore.limit = (factor * loadmore.limit) | 0;   // grow the page size
        data.offset += loadmore.limit;
        const reports = await get_api_labs({ mrn: data.mrn, omega, alpha: 'T-' + data.offset });
        all_reports.push(...decorate(reports));
        all_reports = all_reports; // reassignment triggers Svelte reactivity
    } finally {
        loadmore.loading = false;
    }
}
|
||||||
|
// Static state on the function object: busy flag and current page size.
loadmore.loading = false;
loadmore.limit = 30;

// Prime the list: issue up to `requests` loads until `target` reports exist.
(async function loadinit(target = 16, requests = 4) {
    let attempts = 0;
    while(attempts < requests && all_reports.length < target) {
        await loadmore();
        ++attempts;
    }
})();
|
||||||
|
|
||||||
|
// Infinite scroll: when the sentinel row at the bottom of the list becomes
// half visible (and no filter is active), fetch the next window.
const observer = new IntersectionObserver(
    (entries) => {
        if(!query && entries[0].isIntersecting) loadmore(null);
    },
    { root: null, rootMargin: '0px', threshold: 0.5 }
);
let bottom = null;
// Re-attach the observer whenever the sentinel element is (re)bound.
$: {
    observer.disconnect();
    if(bottom) observer.observe(bottom);
}
|
||||||
|
|
||||||
|
let reportlist;

// Keep the list scrolled sensibly as the selection changes: center the
// active item when one is selected; otherwise re-anchor on the first item
// currently in view (pane widths change when the detail pane toggles).
async function scroll(selection) {
    if(selection) {
        await tick();
        const active = reportlist.querySelector('.active');
        if(active && !isInViewport(active, true)) {
            active.scrollIntoView({ block: 'center' });
        }
    } else {
        for(const item of reportlist.children) {
            if(!isInViewport(item)) continue;
            await tick();
            item.scrollIntoView({ block: 'start' });
            break;
        }
    }
}
$: if(reportlist) scroll(selection);
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<svelte:head>
|
||||||
|
<title>Labs</title>
|
||||||
|
</svelte:head>
|
||||||
|
|
||||||
|
{#if selection}
|
||||||
|
<div class="halfpane rightpane">
|
||||||
|
<nav class="navbar bg-body-secondary">
|
||||||
|
<div class="container-fluid">
|
||||||
|
<span class="navbar-brand">{datetime_datestr(selection._ts)}@{datetime_timestr(selection._ts)} {selection.accession} {selection.specimen}</span>
|
||||||
|
<button type="button" class="btn btn-outline-light" on:click={() => selection = null}>❌</button>
|
||||||
|
</div>
|
||||||
|
</nav>
|
||||||
|
<div class="container-fluid report">{@html pattern ? filter_mark(pattern, escapeHTML(selection.body)) : escapeHTML(selection.body)}</div>
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
<div class={selection ? 'halfpane leftpane' : ''}>
|
||||||
|
<div class="card {selection ? '' : 'mb-3 shadow'}">
|
||||||
|
<nav class="navbar bg-body-tertiary">
|
||||||
|
<form class="container-fluid">
|
||||||
|
<div class="input-group">
|
||||||
|
<span class="input-group-text">Labs</span>
|
||||||
|
<input type="text" class="form-control" placeholder="Filter..." bind:value={query}>
|
||||||
|
{#if query}<button type="button" class="btn btn-outline-secondary" on:click={() => query = ''}>❌</button>{/if}
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</nav>
|
||||||
|
<ul class="list-group list-group-flush" bind:this={reportlist}>
|
||||||
|
{#if pattern}
|
||||||
|
{#each all_reports as row}
|
||||||
|
{#if filter_test(pattern, row._content)}
|
||||||
|
{@const abs = abstract(row.body)}
|
||||||
|
<li class="list-group-item" class:active={(selection) && (selection.uid == row.uid)} on:click={() => selection = selection !== row ? row : null}>
|
||||||
|
<div class="singleline" style="font-weight: bold;">{datetime_datestr(row._ts)}@{datetime_timestr(row._ts)} {row.accession} {row.specimen}</div>
|
||||||
|
{#if abs.length > 0}<div class="abstract singleline">{abs.join(', ')}</div>{/if}
|
||||||
|
<div class="snippets">{#each filter_snippets_lines(pattern, escapeHTML(row.body), undefined, 3, 6) as match}<div>{@html match}</div>{/each}</div>
|
||||||
|
</li>
|
||||||
|
{/if}
|
||||||
|
{/each}
|
||||||
|
{:else}
|
||||||
|
{#each all_reports as row}
|
||||||
|
{@const abs = abstract(row.body)}
|
||||||
|
<li class="list-group-item" class:active={(selection) && (selection.uid == row.uid)} on:click={() => selection = selection !== row ? row : null}>
|
||||||
|
<div class="singleline" style="font-weight: bold;">{datetime_datestr(row._ts)}@{datetime_timestr(row._ts)} {row.accession} {row.specimen}</div>
|
||||||
|
{#if abs.length > 0}<div class="abstract singleline">{abs.join(', ')}</div>{/if}
|
||||||
|
</li>
|
||||||
|
{/each}
|
||||||
|
{/if}
|
||||||
|
<li class="list-group-item" style="padding: 0;" bind:this={bottom}>{#if loadmore.loading}<button type="button" class="btn btn-primary w-100" disabled>Loading...</button>{:else}<button type="button" class="btn btn-primary w-100" on:click={loadmore}>Load more</button>{/if}</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<style>
|
||||||
|
:global(div.snippets mark, div.report mark) {
|
||||||
|
padding: 0;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
@media (prefers-reduced-motion: no-preference) {
|
||||||
|
:root {
|
||||||
|
scroll-behavior: auto;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.navbar {
|
||||||
|
position: sticky;
|
||||||
|
z-index: 1020;
|
||||||
|
top: 3.5rem;
|
||||||
|
}
|
||||||
|
.leftpane {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
li.active {
|
||||||
|
scroll-margin-top: 3.5rem;
|
||||||
|
}
|
||||||
|
div.singleline {
|
||||||
|
white-space: nowrap;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
}
|
||||||
|
.abstract {
|
||||||
|
font-size: 0.8em;
|
||||||
|
}
|
||||||
|
.snippets {
|
||||||
|
font-family: monospace;
|
||||||
|
}
|
||||||
|
.report {
|
||||||
|
font-family: monospace;
|
||||||
|
white-space: pre-wrap;
|
||||||
|
}
|
||||||
|
@media screen and (min-width: 720px) {
|
||||||
|
.halfpane {
|
||||||
|
position: absolute;
|
||||||
|
top: 3.5rem;
|
||||||
|
bottom: 0;
|
||||||
|
width: 50%;
|
||||||
|
overflow: auto;
|
||||||
|
}
|
||||||
|
.leftpane {
|
||||||
|
display: block;
|
||||||
|
width: 33%;
|
||||||
|
left: 0;
|
||||||
|
z-index: -1;
|
||||||
|
direction: rtl;
|
||||||
|
}
|
||||||
|
.leftpane > * {
|
||||||
|
direction: ltr;
|
||||||
|
}
|
||||||
|
.rightpane {
|
||||||
|
width: 67%;
|
||||||
|
right: 0;
|
||||||
|
box-shadow: var(--bs-box-shadow);
|
||||||
|
}
|
||||||
|
.halfpane .navbar {
|
||||||
|
top: 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</style>
|
10
frontend/src/routes/chart/[mrn]/notes/+page.js
Normal file
10
frontend/src/routes/chart/[mrn]/notes/+page.js
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
import { get_api_notes } from '$lib/backend.js';
|
||||||
|
|
||||||
|
/** @type {import('./$types').PageLoad} */
// Initial page load: fetch the most recent `offset` days of progress notes
// for the patient identified by the route's mrn parameter.
export async function load({ params, fetch }) {
    const mrn = params.mrn;
    const offset = 30;
    const reports = await get_api_notes({ fetch, mrn, alpha: 'T-' + offset, omega: 'N' });
    return { mrn, reports, offset };
}
|
173
frontend/src/routes/chart/[mrn]/notes/+page.svelte
Normal file
173
frontend/src/routes/chart/[mrn]/notes/+page.svelte
Normal file
@ -0,0 +1,173 @@
|
|||||||
|
<script>
|
||||||
|
import { tick } from 'svelte';
|
||||||
|
import { debounce, escapeHTML, escapeRegExp, strHashHSL, datetime_datestr, datetime_timestr, isInViewport, filter_pattern, filter_test, filter_mark, filter_snippets } from '$lib/util.js';
|
||||||
|
import { get_api_notes } from '$lib/backend.js';
|
||||||
|
|
||||||
|
export let data;
|
||||||
|
|
||||||
|
let query = '', pattern = null, selection = null, all_reports = decorate(data.reports);
|
||||||
|
let debounced_pattern = debounce((/*query*/) => (pattern = query ? filter_pattern(escapeHTML(query)) : null), 200);
|
||||||
|
|
||||||
|
$: debounced_pattern(query); // argument `query` is for reactivity hinting only
|
||||||
|
|
||||||
|
// Decorate raw notes with a filter blob (`_content`), a synthetic identity
// key (`_uid`: date + titles + visit/admission), and a Date timestamp
// (`_ts`), then order newest-first.
function decorate(xs) {
    const decorated = xs.map(x => {
        const _content = escapeHTML(Object.values(x).join('\x00'));
        const _uid = x.note_dated + x.local_title + x.standard_title + (x.visit || x.admitted);
        const _ts = new Date(x.note_dated);
        return Object.assign({ _content, _uid, _ts }, x);
    });
    return decorated.sort((a, b) => b._ts - a._ts);
}
|
||||||
|
|
||||||
|
// Fetch the next (older) window of notes and append them to the list.
// The window size grows by `factor` on each call; a loading flag prevents
// overlapping requests.
async function loadmore(evt, factor = 1.5) {
    if(loadmore.loading) return;   // re-entrancy guard
    loadmore.loading = true;
    try {
        const omega = 'T-' + (data.offset + 1);
        loadmore.limit = (factor * loadmore.limit) | 0;   // grow the page size
        data.offset += loadmore.limit;
        const reports = await get_api_notes({ mrn: data.mrn, omega, alpha: 'T-' + data.offset });
        all_reports.push(...decorate(reports));
        all_reports = all_reports; // reassignment triggers Svelte reactivity
    } finally {
        loadmore.loading = false;
    }
}

// Static state on the function object: busy flag and current page size.
loadmore.loading = false;
loadmore.limit = 30;

// Prime the list: issue up to `requests` loads until `target` reports exist.
(async function loadinit(target = 16, requests = 4) {
    let attempts = 0;
    while(attempts < requests && all_reports.length < target) {
        await loadmore();
        ++attempts;
    }
})();
|
||||||
|
|
||||||
|
// Infinite scroll: when the sentinel row at the bottom of the list becomes
// half visible (and no filter is active), fetch the next window.
const observer = new IntersectionObserver(
    (entries) => {
        if(!query && entries[0].isIntersecting) loadmore(null);
    },
    { root: null, rootMargin: '0px', threshold: 0.5 }
);
let bottom = null;
// Re-attach the observer whenever the sentinel element is (re)bound.
$: {
    observer.disconnect();
    if(bottom) observer.observe(bottom);
}

let reportlist;

// Keep the list scrolled sensibly as the selection changes: center the
// active item when one is selected; otherwise re-anchor on the first item
// currently in view (pane widths change when the detail pane toggles).
async function scroll(selection) {
    if(selection) {
        await tick();
        const active = reportlist.querySelector('.active');
        if(active && !isInViewport(active, true)) {
            active.scrollIntoView({ block: 'center' });
        }
    } else {
        for(const item of reportlist.children) {
            if(!isInViewport(item)) continue;
            await tick();
            item.scrollIntoView({ block: 'start' });
            break;
        }
    }
}
$: if(reportlist) scroll(selection);
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<svelte:head>
|
||||||
|
<title>Progress notes</title>
|
||||||
|
</svelte:head>
|
||||||
|
|
||||||
|
{#if selection}
|
||||||
|
<div class="halfpane rightpane">
|
||||||
|
<nav class="navbar bg-body-secondary">
|
||||||
|
<div class="container-fluid">
|
||||||
|
<span class="navbar-brand">{datetime_datestr(selection._ts)}@{datetime_timestr(selection._ts)} {selection.local_title}</span>
|
||||||
|
<button type="button" class="btn btn-outline-light" on:click={() => selection = null}>❌</button>
|
||||||
|
</div>
|
||||||
|
</nav>
|
||||||
|
<div class="container-fluid report">{@html pattern ? filter_mark(pattern, escapeHTML(selection.body)) : escapeHTML(selection.body)}</div>
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
<div class={selection ? 'halfpane leftpane' : ''}>
|
||||||
|
<div class="card {selection ? '' : 'mb-3 shadow'}">
|
||||||
|
<nav class="navbar bg-body-tertiary">
|
||||||
|
<form class="container-fluid">
|
||||||
|
<div class="input-group">
|
||||||
|
<span class="input-group-text">Progress notes</span>
|
||||||
|
<input type="text" class="form-control" placeholder="Filter..." bind:value={query}>
|
||||||
|
{#if query}<button type="button" class="btn btn-outline-secondary" on:click={() => query = ''}>❌</button>{/if}
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</nav>
|
||||||
|
<ul class="list-group list-group-flush" bind:this={reportlist}>
|
||||||
|
{#if pattern}
|
||||||
|
{#each all_reports as row}
|
||||||
|
{#if filter_test(pattern, row._content)}
|
||||||
|
<li class="list-group-item" class:active={(selection) && (selection._uid == row._uid)} on:click={() => selection = selection !== row ? row : null}>
|
||||||
|
<div class="singleline" style="font-weight: bold;">{datetime_datestr(row._ts)}@{datetime_timestr(row._ts)} {row.local_title}</div>
|
||||||
|
<div class="abstract singleline">{row.visit || row.admitted}</div>
|
||||||
|
<div class="snippets">{#each filter_snippets(pattern, escapeHTML(row.body), undefined, 3, 6) as match}<div>{@html match}</div>{/each}</div>
|
||||||
|
</li>
|
||||||
|
{/if}
|
||||||
|
{/each}
|
||||||
|
{:else}
|
||||||
|
{#each all_reports as row}
|
||||||
|
<li class="list-group-item" class:active={(selection) && (selection._uid == row._uid)} on:click={() => selection = selection !== row ? row : null}>
|
||||||
|
<div class="singleline" style="font-weight: bold;">{datetime_datestr(row._ts)}@{datetime_timestr(row._ts)} {row.local_title}</div>
|
||||||
|
<div class="abstract singleline">{row.visit || row.admitted}</div>
|
||||||
|
</li>
|
||||||
|
{/each}
|
||||||
|
{/if}
|
||||||
|
<li class="list-group-item" style="padding: 0;" bind:this={bottom}>{#if loadmore.loading}<button type="button" class="btn btn-primary w-100" disabled>Loading...</button>{:else}<button type="button" class="btn btn-primary w-100" on:click={loadmore}>Load more</button>{/if}</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<style>
|
||||||
|
:global(div.snippets mark, div.report mark) {
|
||||||
|
padding: 0;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
@media (prefers-reduced-motion: no-preference) {
|
||||||
|
:root {
|
||||||
|
scroll-behavior: auto;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.navbar {
|
||||||
|
position: sticky;
|
||||||
|
z-index: 1020;
|
||||||
|
top: 3.5rem;
|
||||||
|
}
|
||||||
|
.leftpane {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
li.active {
|
||||||
|
scroll-margin-top: 3.5rem;
|
||||||
|
}
|
||||||
|
div.singleline {
|
||||||
|
white-space: nowrap;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
}
|
||||||
|
.abstract {
|
||||||
|
font-size: 0.8em;
|
||||||
|
}
|
||||||
|
.snippets {
|
||||||
|
font-family: monospace;
|
||||||
|
}
|
||||||
|
.report {
|
||||||
|
font-family: monospace;
|
||||||
|
white-space: pre-wrap;
|
||||||
|
}
|
||||||
|
@media screen and (min-width: 720px) {
|
||||||
|
.halfpane {
|
||||||
|
position: absolute;
|
||||||
|
top: 3.5rem;
|
||||||
|
bottom: 0;
|
||||||
|
width: 50%;
|
||||||
|
overflow: auto;
|
||||||
|
}
|
||||||
|
.leftpane {
|
||||||
|
display: block;
|
||||||
|
width: 33%;
|
||||||
|
left: 0;
|
||||||
|
z-index: -1;
|
||||||
|
direction: rtl;
|
||||||
|
}
|
||||||
|
.leftpane > * {
|
||||||
|
direction: ltr;
|
||||||
|
}
|
||||||
|
.rightpane {
|
||||||
|
width: 67%;
|
||||||
|
right: 0;
|
||||||
|
box-shadow: var(--bs-box-shadow);
|
||||||
|
}
|
||||||
|
.halfpane .navbar {
|
||||||
|
top: 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</style>
|
10
frontend/src/routes/chart/[mrn]/orders/+page.js
Normal file
10
frontend/src/routes/chart/[mrn]/orders/+page.js
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
import { get_api_orders } from '$lib/backend.js';
|
||||||
|
|
||||||
|
/** @type {import('./$types').PageLoad} */
// Initial page load: fetch the most recent `offset` days of orders for the
// patient identified by the route's mrn parameter.
export async function load({ params, fetch }) {
    const mrn = params.mrn;
    const offset = 30;
    const reports = await get_api_orders({ fetch, mrn, alpha: 'T-' + offset, omega: 'N' });
    return { mrn, reports, offset };
}
|
166
frontend/src/routes/chart/[mrn]/orders/+page.svelte
Normal file
166
frontend/src/routes/chart/[mrn]/orders/+page.svelte
Normal file
@ -0,0 +1,166 @@
|
|||||||
|
<script>
|
||||||
|
import { tick } from 'svelte';
|
||||||
|
import { debounce, escapeHTML, escapeRegExp, strHashHSL, isInViewport, filter_pattern, filter_test, filter_mark } from '$lib/util.js';
|
||||||
|
import { get_api_orders } from '$lib/backend.js';
|
||||||
|
|
||||||
|
export let data;
|
||||||
|
|
||||||
|
let query = '', pattern = null, selection = null, all_reports = decorate(data.reports);
|
||||||
|
let debounced_pattern = debounce((/*query*/) => (pattern = query ? filter_pattern(escapeHTML(query)) : null), 200);
|
||||||
|
|
||||||
|
$: debounced_pattern(query); // argument `query` is for reactivity hinting only
|
||||||
|
|
||||||
|
// Decorate raw orders with a filter blob (`_content`), an identity key
// (`_uid`: the order body text), and an entry timestamp as a Date (`_ts`).
function decorate(xs) {
    return xs.map(x => {
        const _content = escapeHTML(Object.values(x).join('\x00'));
        const _uid = x.body;
        const _ts = new Date(x.datetime_entered);
        return Object.assign({ _content, _uid, _ts }, x);
    });
}
|
||||||
|
|
||||||
|
// Fetch the next (older) window of orders and append them to the list.
// The window size grows by `factor` on each call; a loading flag prevents
// overlapping requests.
async function loadmore(evt, factor = 1.5) {
    if(loadmore.loading) return;   // re-entrancy guard
    loadmore.loading = true;
    try {
        const omega = 'T-' + (data.offset + 1);
        loadmore.limit = (factor * loadmore.limit) | 0;   // grow the page size
        data.offset += loadmore.limit;
        const reports = await get_api_orders({ mrn: data.mrn, omega, alpha: 'T-' + data.offset });
        all_reports.push(...decorate(reports));
        all_reports = all_reports; // reassignment triggers Svelte reactivity
    } finally {
        loadmore.loading = false;
    }
}

// Static state on the function object: busy flag and current page size.
loadmore.loading = false;
loadmore.limit = 30;

// Prime the list: issue up to `requests` loads until `target` reports exist.
(async function loadinit(target = 16, requests = 4) {
    let attempts = 0;
    while(attempts < requests && all_reports.length < target) {
        await loadmore();
        ++attempts;
    }
})();
|
||||||
|
|
||||||
|
// Infinite scroll: when the sentinel row at the bottom of the list becomes
// half visible (and no filter is active), fetch the next window.
const observer = new IntersectionObserver(
    (entries) => {
        if(!query && entries[0].isIntersecting) loadmore(null);
    },
    { root: null, rootMargin: '0px', threshold: 0.5 }
);
let bottom = null;
// Re-attach the observer whenever the sentinel element is (re)bound.
$: {
    observer.disconnect();
    if(bottom) observer.observe(bottom);
}

let reportlist;

// Keep the list scrolled sensibly as the selection changes: center the
// active item when one is selected; otherwise re-anchor on the first item
// currently in view (pane widths change when the detail pane toggles).
async function scroll(selection) {
    if(selection) {
        await tick();
        const active = reportlist.querySelector('.active');
        if(active && !isInViewport(active, true)) {
            active.scrollIntoView({ block: 'center' });
        }
    } else {
        for(const item of reportlist.children) {
            if(!isInViewport(item)) continue;
            await tick();
            item.scrollIntoView({ block: 'start' });
            break;
        }
    }
}
$: if(reportlist) scroll(selection);
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<svelte:head>
|
||||||
|
<title>Orders</title>
|
||||||
|
</svelte:head>
|
||||||
|
|
||||||
|
{#if selection}
|
||||||
|
<div class="halfpane rightpane">
|
||||||
|
<nav class="navbar bg-body-secondary">
|
||||||
|
<div class="container-fluid">
|
||||||
|
<span class="navbar-brand">{selection.text || ''}</span>
|
||||||
|
<button type="button" class="btn btn-outline-light" on:click={() => selection = null}>❌</button>
|
||||||
|
</div>
|
||||||
|
</nav>
|
||||||
|
<div class="container-fluid"><dl class="report">{#each Object.entries(selection) as entry}{#if entry[0].charAt(0) != '_'}<dt>{entry[0]}</dt><dd>{entry[1]}</dd>{/if}{/each}</dl></div>
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
<div class={selection ? 'halfpane leftpane' : ''}>
|
||||||
|
<div class="card {selection ? '' : 'mb-3 shadow'}">
|
||||||
|
<nav class="navbar bg-body-tertiary">
|
||||||
|
<form class="container-fluid">
|
||||||
|
<div class="input-group">
|
||||||
|
<span class="input-group-text">Orders</span>
|
||||||
|
<input type="text" class="form-control" placeholder="Filter..." bind:value={query}>
|
||||||
|
{#if query}<button type="button" class="btn btn-outline-secondary" on:click={() => query = ''}>❌</button>{/if}
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</nav>
|
||||||
|
<ul class="list-group list-group-flush" bind:this={reportlist}>
|
||||||
|
{#if pattern}
|
||||||
|
{#each all_reports as row}
|
||||||
|
{#if filter_test(pattern, row._content)}
|
||||||
|
<li class="list-group-item" class:active={(selection) && (selection._uid == row._uid)} on:click={() => selection = selection !== row ? row : null}>
|
||||||
|
<div class="singleline" style="font-weight: bold;">{row.text || ''}</div>
|
||||||
|
<div class="report">{@html pattern ? filter_mark(pattern, escapeHTML(row.body)) : escapeHTML(row.body)}</div>
|
||||||
|
</li>
|
||||||
|
{/if}
|
||||||
|
{/each}
|
||||||
|
{:else}
|
||||||
|
{#each all_reports as row}
|
||||||
|
<li class="list-group-item" class:active={(selection) && (selection._uid == row._uid)} on:click={() => selection = selection !== row ? row : null}>
|
||||||
|
<div class="singleline" style="font-weight: bold;">{row.text || ''}</div>
|
||||||
|
<div class="report">{@html pattern ? filter_mark(pattern, escapeHTML(row.body)) : escapeHTML(row.body)}</div>
|
||||||
|
</li>
|
||||||
|
{/each}
|
||||||
|
{/if}
|
||||||
|
<li class="list-group-item" style="padding: 0;" bind:this={bottom}>{#if loadmore.loading}<button type="button" class="btn btn-primary w-100" disabled>Loading...</button>{:else}<button type="button" class="btn btn-primary w-100" on:click={loadmore}>Load more</button>{/if}</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<style>
|
||||||
|
:global(div.report mark) {
|
||||||
|
padding: 0;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
@media (prefers-reduced-motion: no-preference) {
|
||||||
|
:root {
|
||||||
|
scroll-behavior: auto;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.navbar {
|
||||||
|
position: sticky;
|
||||||
|
z-index: 1020;
|
||||||
|
top: 3.5rem;
|
||||||
|
}
|
||||||
|
.leftpane {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
li.active {
|
||||||
|
scroll-margin-top: 3.5rem;
|
||||||
|
}
|
||||||
|
div.singleline {
|
||||||
|
white-space: nowrap;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
}
|
||||||
|
.report {
|
||||||
|
font-family: monospace;
|
||||||
|
white-space: pre-wrap;
|
||||||
|
}
|
||||||
|
@media screen and (min-width: 720px) {
|
||||||
|
.halfpane {
|
||||||
|
position: absolute;
|
||||||
|
top: 3.5rem;
|
||||||
|
bottom: 0;
|
||||||
|
width: 50%;
|
||||||
|
overflow: auto;
|
||||||
|
}
|
||||||
|
.leftpane {
|
||||||
|
display: block;
|
||||||
|
width: 33%;
|
||||||
|
left: 0;
|
||||||
|
z-index: -1;
|
||||||
|
direction: rtl;
|
||||||
|
}
|
||||||
|
.leftpane > * {
|
||||||
|
direction: ltr;
|
||||||
|
}
|
||||||
|
.rightpane {
|
||||||
|
width: 67%;
|
||||||
|
right: 0;
|
||||||
|
box-shadow: var(--bs-box-shadow);
|
||||||
|
}
|
||||||
|
.halfpane .navbar {
|
||||||
|
top: 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</style>
|
10
frontend/src/routes/clinics/+page.js
Normal file
10
frontend/src/routes/clinics/+page.js
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
/** @type {import('./$types').PageLoad} */
export async function load({ params, fetch }) {
	// Fetch the clinic list. The array doubles as a name -> clinic lookup
	// table: each clinic is also keyed onto the array object by name, which
	// the page component relies on (e.g. `data.clinics[row.name]`).
	let clinics = await (await fetch('/api/clinic/list')).json();
	for(const clinic of clinics) clinics[clinic.name] = clinic;
	// Mark the user's previously saved clinic selection as active.
	let selection = await (await fetch('/api/config/user/clinics')).json();
	for(const name of selection) if(clinics[name]) clinics[name].active = true;
	return {
		clinics
	};
}
|
45
frontend/src/routes/clinics/+page.svelte
Normal file
45
frontend/src/routes/clinics/+page.svelte
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
<script>
|
||||||
|
export let data;
|
||||||
|
let filter = '';
|
||||||
|
|
||||||
|
$: filter = filter.toUpperCase();
|
||||||
|
$: selection = data.clinics.filter(row => row.active);
|
||||||
|
$: {
|
||||||
|
fetch('/api/config/user/clinics', { method: 'PUT', headers: { 'Content-type': 'application/json' }, body: JSON.stringify(selection.map(row => row.name)) });
|
||||||
|
}
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<svelte:head>
|
||||||
|
<title>Clinics</title>
|
||||||
|
</svelte:head>
|
||||||
|
|
||||||
|
<h1>Clinics</h1>
|
||||||
|
<div class="card">
|
||||||
|
<div class="input-group">
|
||||||
|
<span class="input-group-text">🔎</span>
|
||||||
|
<input type="text" class="form-control" placeholder="Clinic" bind:value={filter} />
|
||||||
|
</div>
|
||||||
|
{#if filter.length > 0}
|
||||||
|
<ul class="list-group list-group-flush">
|
||||||
|
{#each data.clinics as row}{#if (row.name.charAt(0) != 'Z') && (row.name != 'DELETED CLINIC') && (row.name != 'CLINIC DELETED') && (row.name.startsWith(filter))}<li class="list-group-item" class:active={row.active} on:click={evt => row.active = !row.active}>{row.name}</li>{/if}{/each}
|
||||||
|
</ul>
|
||||||
|
{/if}
|
||||||
|
{#if selection.length > 0}
|
||||||
|
<div class="card-footer">
|
||||||
|
{#each selection as row}<span class="badge text-bg-primary">{row.name} <span on:click={evt => data.clinics[row.name].active = false}>❌</span></span>{/each}
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<style>
|
||||||
|
.list-group {
|
||||||
|
max-height: 50vh;
|
||||||
|
overflow-y: auto;
|
||||||
|
}
|
||||||
|
.list-group-item {
|
||||||
|
cursor: default;
|
||||||
|
}
|
||||||
|
.card-footer .badge:not(:last-child) {
|
||||||
|
margin-right: 0.25em;
|
||||||
|
}
|
||||||
|
</style>
|
23
frontend/src/routes/lookup/+page.js
Normal file
23
frontend/src/routes/lookup/+page.js
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
import { redirect } from '@sveltejs/kit';
|
||||||
|
import { get_api_lookup } from '$lib/backend.js';
|
||||||
|
|
||||||
|
/** @type {import('./$types').PageLoad} */
export async function load({ url, fetch }) {
	// Normalize the free-text query: trim ends, collapse runs of whitespace.
	let query = (url.searchParams.get('q') || '').replace(/^\s+|\s+$/g, '').replace(/\s+/g, ' ');
	// Optional narrowing filters: a numeric ordinal, or an upper-cased name prefix.
	// (NaN when ?ordinal= is absent/non-numeric; NaN is falsy, so the filter is skipped.)
	let ordinal = parseInt(url.searchParams.get('ordinal') || '');
	let name = (url.searchParams.get('name') || '').replace(/^\s+|\s+$/g, '').replace(/\s+/g, ' ').toUpperCase();
	let items = query ? await get_api_lookup({ fetch, query }) : [];
	if(ordinal) items = items.filter(row => row.ordinal == ordinal);
	else if(name) items = items.filter(row => row.name.startsWith(name));
	let detail, match;
	// With exactly one hit and ?rd= set, fetch the detail text and try to
	// redirect straight to the matching chart page.
	if((items.length == 1) && (url.searchParams.get('rd'))) {
		detail = await get_api_lookup({ fetch, query, ordinal: (items[0].ordinal || '0'), force: url.searchParams.get('force') });
		// Parse a header line of the form "NAME;(...)? [NUM ]ddd-dd-ddddP? REST".
		// NOTE(review): format assumed from the regex — confirm against the backend's detail output.
		if(match = /(^[^\r\n;]+);(?:\([^\)]*?\))? (?:(\d+) )?(\d{3}-\d{2}-\d{4}P?) (.+?)\s*$/m.exec(detail)) {
			// Prefer an explicit chart number (group 2); otherwise derive one
			// from the SSN-like token (group 3) by stripping separators.
			if(match[2]) throw redirect(302, '/chart/' + match[2]);
			if(match[3]) throw redirect(302, '/chart/' + match[3].replace(/[^\dP]/g, ''));
		}
	}
	return {
		query, ordinal, name, items, detail
	};
}
|
62
frontend/src/routes/lookup/+page.svelte
Normal file
62
frontend/src/routes/lookup/+page.svelte
Normal file
@ -0,0 +1,62 @@
|
|||||||
|
<script>
|
||||||
|
import { tick } from 'svelte';
|
||||||
|
import { page } from '$app/stores';
|
||||||
|
export let data;
|
||||||
|
|
||||||
|
let ref = null;
|
||||||
|
|
||||||
|
$: tick().then(() => ref ? ref.focus() : null);
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<svelte:head>
|
||||||
|
<title>Lookup</title>
|
||||||
|
</svelte:head>
|
||||||
|
|
||||||
|
<h1>Lookup</h1>
|
||||||
|
<div class="card mb-3 shadow">
|
||||||
|
<form method="get" action="?">
|
||||||
|
<div class="input-group">
|
||||||
|
<span class="input-group-text">🔎</span>
|
||||||
|
<input type="text" class="form-control" placeholder="Lookup" name="q" bind:value={data.query} bind:this={ref} />
|
||||||
|
<button type="submit" class="btn btn-primary">Search</button>
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
{#if data.items.length > 0}
|
||||||
|
<table class="table" data-sveltekit-preload-data="tap">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th scope="col">Name</th>
|
||||||
|
<th scope="col">DOB</th>
|
||||||
|
<th scope="col">SSN</th>
|
||||||
|
<th scope="col"></th>
|
||||||
|
<th scope="col">Type</th>
|
||||||
|
<th scope="col"></th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
{#each data.items as row}
|
||||||
|
<tr>
|
||||||
|
<td><a href="/lookup?q={data.query}&ordinal={row.ordinal || 0}&rd=true">{row.name}{#if row.alias}{' (' + row.alias + ')'}{/if}</a></td>
|
||||||
|
<td>{row.dob}</td>
|
||||||
|
<td>{row.ssn}</td>
|
||||||
|
<td>{row.yesno}</td>
|
||||||
|
<td>{row.type}</td>
|
||||||
|
<td>{row.no}</td>
|
||||||
|
</tr>
|
||||||
|
{/each}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
{#if (data.items.length) == 1 && (data.detail)}
|
||||||
|
<div class="card-body">
|
||||||
|
<pre class="card-text">{data.detail}</pre>
|
||||||
|
<p class="card-text"><a class="btn btn-danger" href="/lookup?q={data.query}&ordinal={data.items[0].ordinal || 0}&rd=true&force=true">Proceed to {data.items[0].name}</a></p>
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
{/if}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<style>
|
||||||
|
.card table.table {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
</style>
|
10
frontend/src/routes/rcrs/+page.js
Normal file
10
frontend/src/routes/rcrs/+page.js
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
import { get_api_rcrs_patients } from '$lib/backend.js';
|
||||||
|
|
||||||
|
/** @type {import('./$types').PageLoad} */
export async function load({ params, fetch }) {
	// Initial window: reports from the past year, i.e. T-365 through "N" (now).
	const offset = 365;
	const reports = await get_api_rcrs_patients({ fetch, alpha: 'T-' + offset, omega: 'N' });
	return { reports, offset };
}
|
491
frontend/src/routes/rcrs/+page.svelte
Normal file
491
frontend/src/routes/rcrs/+page.svelte
Normal file
@ -0,0 +1,491 @@
|
|||||||
|
<script>
|
||||||
|
import { tick } from 'svelte';
|
||||||
|
import { debounce, escapeHTML, escapeRegExp, strHashHSL, isInViewport, filter_pattern, filter_test, filter_mark } from '$lib/util.js';
|
||||||
|
import { get_api_rcrs_patients } from '$lib/backend.js';
|
||||||
|
|
||||||
|
export let data;
|
||||||
|
|
||||||
|
const sitecodes = {
|
||||||
|
'C00.0': 'External lip upper',
|
||||||
|
'C00.1': 'External lip lower',
|
||||||
|
'C00.2': 'External lip NOS',
|
||||||
|
'C00.3': 'Upper lip, mucosa',
|
||||||
|
'C00.4': 'Lower lip, mucosa',
|
||||||
|
'C00.5': 'Mucosa lip, NOS',
|
||||||
|
'C00.6': 'Commissure lip',
|
||||||
|
'C00.8': 'Overlapping lesion of lip',
|
||||||
|
'C00.9': 'Lip, NOS',
|
||||||
|
'C01.9': 'Base of tongue, NOS',
|
||||||
|
'C02.0': 'Dorsal surface tongue, NOS',
|
||||||
|
'C02.1': 'Border of tongue',
|
||||||
|
'C02.2': 'Ventral surface of tongue NOS',
|
||||||
|
'C02.3': 'Anterior 2/3 of tongue NOS',
|
||||||
|
'C02.4': 'Lingual tonsil',
|
||||||
|
'C02.8': 'Overlapping lesion of tongue',
|
||||||
|
'C02.9': 'Tongue NOS',
|
||||||
|
'C03.0': 'Upper gum',
|
||||||
|
'C03.1': 'Lower gum',
|
||||||
|
'C03.9': 'Gum NOS',
|
||||||
|
'C04.0': 'Anterior floor of mouth',
|
||||||
|
'C04.1': 'Lateral floor of mouth',
|
||||||
|
'C04.8': 'Overlapping lesion of floor of mouth',
|
||||||
|
'C04.9': 'Floor of mouth NOS',
|
||||||
|
'C05.0': 'Hard palate',
|
||||||
|
'C05.1': 'Soft palate NOS (excludes Nasopharyngcal surface C11.3)',
|
||||||
|
'C05.2': 'Uvula',
|
||||||
|
'C05.8': 'Overlapping lesion of palate',
|
||||||
|
'C05.9': 'Palate NOS',
|
||||||
|
'C06.0': 'Cheek mucosa',
|
||||||
|
'C06.1': 'Vestibule of mouth',
|
||||||
|
'C06.2': 'Retromolar area',
|
||||||
|
'C06.8': 'Overlapping lesion of other and unspecified parts of mouth',
|
||||||
|
'C06.9': 'Mouth NOS',
|
||||||
|
'C07.9': 'Parotid gland',
|
||||||
|
'C08.0': 'Submaxillary gland',
|
||||||
|
'C08.1': 'Sublingual gland',
|
||||||
|
'C08.8': 'Overlapping lesion of major salivary glands',
|
||||||
|
'C08.9': 'Major salivary gland, NOS',
|
||||||
|
'C09.0': 'Tonsillar fossa',
|
||||||
|
'C09.1': 'Tonsillar pillar',
|
||||||
|
'C09.8': 'Overlapping lesion of tonsil',
|
||||||
|
'C09.9': 'Tonsil NOS (excludes Lingual tonsil C02.4 and Pharyngeal tonsil C11.1)',
|
||||||
|
'C10.0': 'Vallecula',
|
||||||
|
'C10.1': 'Anterior surface of epiglottis',
|
||||||
|
'C10.2': 'Lateral wall oropharynx',
|
||||||
|
'C10.3': 'Posterior wall oropharynx',
|
||||||
|
'C10.4': 'Branchial cleft (site of neoplosm)',
|
||||||
|
'C10.8': 'Overlapping lesion of oropharynx',
|
||||||
|
'C10.9': 'Oropharynx NOS',
|
||||||
|
'C11.0': 'Superior wall of nasopharynx',
|
||||||
|
'C11.1': 'Posterior wall nasopharynx',
|
||||||
|
'C11.2': 'Lateral wall nasopharynx',
|
||||||
|
'C11.3': 'Anterior wall nasopharynx',
|
||||||
|
'C11.8': 'Overlapping lesion of nasopharynx',
|
||||||
|
'C11.9': 'Nasopharynx NOS',
|
||||||
|
'C12.9': 'Pyriform sinus',
|
||||||
|
'C13.0': 'Postcricoid region',
|
||||||
|
'C13.1': 'Hypopharyngeal aspect of aryepiglottic fold',
|
||||||
|
'C13.2': 'Posterior wall hypopharynx',
|
||||||
|
'C13.8': 'Overlapping lesion of hypopharynx',
|
||||||
|
'C13.9': 'Hypopharynx, NOS',
|
||||||
|
'C14.0': 'Pharynx NOS',
|
||||||
|
'C14.2': 'Waldeyer\'s ring',
|
||||||
|
'C14.8': 'Overlapping lesion of lip, oral cavity and pharynx',
|
||||||
|
'C15.0': 'Cervical esophagus',
|
||||||
|
'C15.1': 'Thoracic esophagus',
|
||||||
|
'C15.2': 'Abdominal esophagus',
|
||||||
|
'C15.3': 'Upper third of esophagus',
|
||||||
|
'C15.4': 'Middle third of esophagus',
|
||||||
|
'C15.5': 'Esophagus lower third',
|
||||||
|
'C15.8': 'Overlapping lesion of esophagus',
|
||||||
|
'C15.9': 'Esophagus NOS',
|
||||||
|
'C16.0': 'Cardia, NOS',
|
||||||
|
'C16.1': 'Fundus stomach',
|
||||||
|
'C16.2': 'Body stomach',
|
||||||
|
'C16.3': 'Gastric antrum',
|
||||||
|
'C16.4': 'Pylorus',
|
||||||
|
'C16.5': 'Lesser curvature of stomach, NOS (not classifiable to C16.1 to C16.4)',
|
||||||
|
'C16.6': 'Greater curvature of stomach, NOS (not classifiable to C16.0 to C16.4)',
|
||||||
|
'C16.8': 'Overlapping lesion of stomach',
|
||||||
|
'C16.9': 'Stomach NOS',
|
||||||
|
'C17.0': 'Duodenum',
|
||||||
|
'C17.1': 'Jejunum',
|
||||||
|
'C17.2': 'Ileum (excludes ileocecal valve C18.0)',
|
||||||
|
'C17.3': 'Meckel\'s diverticulum (site of neoplasm)',
|
||||||
|
'C17.8': 'Overlapping lesion of small intestine',
|
||||||
|
'C17.9': 'Small intestine NOS',
|
||||||
|
'C18.0': 'Cecum',
|
||||||
|
'C18.1': 'Appendix',
|
||||||
|
'C18.2': 'Ascending colon',
|
||||||
|
'C18.3': 'Hepatic flexure of colon',
|
||||||
|
'C18.4': 'Transverse colon',
|
||||||
|
'C18.5': 'Splenic flexure of colon',
|
||||||
|
'C18.6': 'Descending colon',
|
||||||
|
'C18.7': 'Sigmoid colon',
|
||||||
|
'C18.8': 'Overlapping lesion of colon',
|
||||||
|
'C18.9': 'Colon NOS',
|
||||||
|
'C19.9': 'Rectosigmoid junction',
|
||||||
|
'C20.9': 'Rectum, NOS',
|
||||||
|
'C21.0': 'Anus, NOS (excludes Skin of anus and Perianal skin (C44.5)',
|
||||||
|
'C21.1': 'Anal canal',
|
||||||
|
'C21.2': 'Cloacogenic zone',
|
||||||
|
'C21.8': 'Overlapping lesion of rectum, anus and anal canal',
|
||||||
|
'C22.0': 'Liver',
|
||||||
|
'C22.1': 'Intrahepatic bile duct',
|
||||||
|
'C23.9': 'Gallbladder',
|
||||||
|
'C24.0': 'Extrahepatic bile duct',
|
||||||
|
'C24.1': 'Ampulla of Vater',
|
||||||
|
'C24.8': 'Overlapping lesion of biliary tract',
|
||||||
|
'C24.9': 'Biliary tract, NOS',
|
||||||
|
'C25.0': 'Head of pancreas',
|
||||||
|
'C25.1': 'Body pancreas',
|
||||||
|
'C25.2': 'Tail pancreas',
|
||||||
|
'C25.3': 'Pancreatic duct',
|
||||||
|
'C25.4': 'Islets of Langerhans',
|
||||||
|
'C25.7': 'Neck of pancreas',
|
||||||
|
'C25.8': 'Overlapping lesion of pancreas',
|
||||||
|
'C25.9': 'Pancreas NOS',
|
||||||
|
'C26.0': 'Intestinal tract, NOS',
|
||||||
|
'C26.8': 'Overlapping lesion of digestive system',
|
||||||
|
'C26.9': 'Gastrointestinal tract, NOS',
|
||||||
|
'C30.0': 'Nasal cavity (excludes Nose, NOS C76.0)',
|
||||||
|
'C30.1': 'Middle ear',
|
||||||
|
'C31.0': 'Maxillary sinus',
|
||||||
|
'C31.1': 'Ethmoid sinus',
|
||||||
|
'C31.2': 'Frontal sinus',
|
||||||
|
'C31.3': 'Sphenoid sinus',
|
||||||
|
'C31.8': 'Overlapping lesion of accessory sinuses',
|
||||||
|
'C31.9': 'Accessory sinus, NOS',
|
||||||
|
'C32.0': 'Glottis',
|
||||||
|
'C32.1': 'Supraglottis',
|
||||||
|
'C32.2': 'Subglottis',
|
||||||
|
'C32.3': 'Laryngeal cartilage',
|
||||||
|
'C32.8': 'Overlapping lesion of larynx',
|
||||||
|
'C32.9': 'Larynx NOS',
|
||||||
|
'C33.9': 'Trachea',
|
||||||
|
'C34.0': 'Main bronchus',
|
||||||
|
'C34.1': 'Upper lobe, lung',
|
||||||
|
'C34.2': 'Middle lobe, lung',
|
||||||
|
'C34.3': 'Lower lobe, lung',
|
||||||
|
'C34.8': 'Overlapping lesion of lung',
|
||||||
|
'C34.9': 'Lung NOS',
|
||||||
|
'C37.9': 'Thymus',
|
||||||
|
'C38.0': 'Heart',
|
||||||
|
'C38.1': 'Anterior mediastinum',
|
||||||
|
'C38.2': 'Posterior mediastinum',
|
||||||
|
'C38.3': 'Mediastinum NOS',
|
||||||
|
'C38.4': 'Pleura NOS',
|
||||||
|
'C38.8': 'Overlapping lesion of heart, mediastinum and pleura',
|
||||||
|
'C39.0': 'Upper respiratory tract, NOS',
|
||||||
|
'C39.8': 'Overlapping lesion of respiratory system and intrathoracic organs',
|
||||||
|
'C39.9': 'Respiratory tract, NOS',
|
||||||
|
'C40.0': 'Upper limb long bones, joints',
|
||||||
|
'C40.1': 'Upper limb short bones, joints',
|
||||||
|
'C40.3': 'Lower limb short bones, joints',
|
||||||
|
'C40.8': 'Overlapping lesion of bones, joints and articular cartilage of limbs',
|
||||||
|
'C40.9': 'Bone limb, NOS',
|
||||||
|
'C41.0': 'Skull and facial bone',
|
||||||
|
'C41.1': 'Mandible',
|
||||||
|
'C41.2': 'Vertebral column (excludes Sacrum and Coccyx C41.4)',
|
||||||
|
'C41.3': 'Rib, sternum, clavicle',
|
||||||
|
'C41.4': 'Pelvic bone',
|
||||||
|
'C41.8': 'Overlapping lesion of bones, joints and articular cartilage',
|
||||||
|
'C41.9': 'Bone NOS',
|
||||||
|
'C42.0': 'Blood',
|
||||||
|
'C42.1': 'Bone marrow',
|
||||||
|
'C42.2': 'Spleen',
|
||||||
|
'C42.3': 'Reticuloendothelial system, NOS',
|
||||||
|
'C42.4': 'Hematopoietic system, NOS',
|
||||||
|
'C44.0': 'Skin lip, NOS',
|
||||||
|
'C44.1': 'Eyelid NOS',
|
||||||
|
'C44.2': 'External ear',
|
||||||
|
'C44.3': 'Skin face',
|
||||||
|
'C44.4': 'Skin scalp, neck',
|
||||||
|
'C44.5': 'Skin trunk',
|
||||||
|
'C44.6': 'Skin limb, upper',
|
||||||
|
'C44.7': 'Skin limb, lower',
|
||||||
|
'C47.0': 'Peripheral nerve head, neck',
|
||||||
|
'C47.1': 'Peripheral nerve shoulder, arm',
|
||||||
|
'C47.2': 'Peripheral nerve leg',
|
||||||
|
'C47.3': 'Peripheral nerve thorax (excludes Thymus, Heart and Mediastinum C37. , C38. )',
|
||||||
|
'C47.4': 'Peripheral nerve abdomen',
|
||||||
|
'C47.5': 'Peripheral nerve pelvis',
|
||||||
|
'C47.6': 'Peripheral nerve trunk',
|
||||||
|
'C47.8': 'Overlapping lesion of peripheral nerves and autonomic nervous system',
|
||||||
|
'C47.9': 'Autonomic nervous system NOS',
|
||||||
|
'C48.0': 'Retroperitoneum',
|
||||||
|
'C48.1': 'Peritoneum',
|
||||||
|
'C48.2': 'Peritoneum NOS',
|
||||||
|
'C48.8': 'Overlapping lesion of retroperitoneum and peritoneum',
|
||||||
|
'C49.0': 'Connective tissue head',
|
||||||
|
'C49.1': 'Connective tissue arm',
|
||||||
|
'C49.2': 'Connective tissue leg',
|
||||||
|
'C49.3': 'Connective tissue thorax (excludes Thymus, Heart and Mediastinum C37. , C38. )',
|
||||||
|
'C49.4': 'Connective tissue abdomen',
|
||||||
|
'C49.5': 'Connective tissue pelvis',
|
||||||
|
'C49.6': 'Connective tissue trunk, NOS',
|
||||||
|
'C49.8': 'Overlapping lesion of connective, subcutaneous and other soft tissues',
|
||||||
|
'C49.9': 'Connective tissue NOS',
|
||||||
|
'C50.0': 'Nipple',
|
||||||
|
'C50.1': 'Central portion of breast',
|
||||||
|
'C50.2': 'Upper inner quadrant of breast',
|
||||||
|
'C50.3': 'Lower inner quadrant of breast',
|
||||||
|
'C50.4': 'Upper outer quadrant of breast',
|
||||||
|
'C50.5': 'Lower outer quadrant of breast',
|
||||||
|
'C50.6': 'Axillary tail of breast',
|
||||||
|
'C50.8': 'Overlapping lesion of breast',
|
||||||
|
'C50.9': 'Breast NOS (excludes Skin of breast C44.5)',
|
||||||
|
'C51.0': 'Labium majus',
|
||||||
|
'C51.1': 'Labium minus',
|
||||||
|
'C51.2': 'Clitoris',
|
||||||
|
'C51.8': 'Overlapping lesion of vulva',
|
||||||
|
'C51.9': 'Vulva, NOS',
|
||||||
|
'C52.9': 'Vagina, NOS',
|
||||||
|
'C53.0': 'Endocervix',
|
||||||
|
'C53.1': 'Exocervix',
|
||||||
|
'C53.8': 'Overlapping lesion of cervix uteri',
|
||||||
|
'C53.9': 'Cervix uteri',
|
||||||
|
'C54.0': 'Isthmus uteri',
|
||||||
|
'C54.1': 'Endometrium',
|
||||||
|
'C54.2': 'Myometrium',
|
||||||
|
'C54.3': 'Fundus uteri',
|
||||||
|
'C54.8': 'Overlapping lesion of corpus uteri',
|
||||||
|
'C54.9': 'Corpus uteri',
|
||||||
|
'C55.9': 'Uterus NOS',
|
||||||
|
'C56.9': 'Ovary',
|
||||||
|
'C57.0': 'Fallopian tube',
|
||||||
|
'C57.1': 'Broad ligament',
|
||||||
|
'C57.2': 'Round ligament',
|
||||||
|
'C57.3': 'Parametrium',
|
||||||
|
'C57.4': 'Uterine adnexa',
|
||||||
|
'C57.7': 'Wolffian body',
|
||||||
|
'C57.8': 'Overlapping lesion of female genital organs',
|
||||||
|
'C57.9': 'Female genital tract, NOS',
|
||||||
|
'C60.0': 'Prepuce',
|
||||||
|
'C60.1': 'Glans penis',
|
||||||
|
'C60.2': 'Body penis',
|
||||||
|
'C60.8': 'Overlapping lesion of penis',
|
||||||
|
'C60.9': 'Penis NOS',
|
||||||
|
'C61.9': 'Prostate gland',
|
||||||
|
'C62.0': 'Undescended testis (site of neoplasm)',
|
||||||
|
'C62.1': 'Descended testis',
|
||||||
|
'C62.9': 'Testis NOS',
|
||||||
|
'C63.0': 'Epididymis',
|
||||||
|
'C63.1': 'Spermatic cord',
|
||||||
|
'C63.2': 'Scrotum, NOS',
|
||||||
|
'C63.7': 'Tunica vaginalis',
|
||||||
|
'C63.8': 'Overlapping lesion of male genital organs',
|
||||||
|
'C63.9': 'Male genital organs, NOS',
|
||||||
|
'C64.9': 'Kidney NOS',
|
||||||
|
'C65.9': 'Renal pelvis',
|
||||||
|
'C66.9': 'Ureter',
|
||||||
|
'C67.0': 'Trigone, bladder',
|
||||||
|
'C67.1': 'Dome, bladder',
|
||||||
|
'C67.2': 'Lateral wall bladder',
|
||||||
|
'C67.4': 'Posterior wall bladder',
|
||||||
|
'C67.6': 'Ureteric orifice',
|
||||||
|
'C67.7': 'Urachus',
|
||||||
|
'C67.8': 'Overlapping lesion of bladder',
|
||||||
|
'C67.9': 'Bladder NOS',
|
||||||
|
'C68.0': 'Urethra',
|
||||||
|
'C68.1': 'Paraurethral gland',
|
||||||
|
'C68.8': 'Overlapping lesion of urinary organs',
|
||||||
|
'C68.9': 'Urinary system, NOS',
|
||||||
|
'C69.0': 'Conjunctiva',
|
||||||
|
'C69.1': 'Cornea, NOS',
|
||||||
|
'C69.2': 'Retina',
|
||||||
|
'C69.3': 'Choroid',
|
||||||
|
'C69.4': 'Ciliary body',
|
||||||
|
'C69.5': 'Lacrimal gland',
|
||||||
|
'C69.6': 'Orbit NOS',
|
||||||
|
'C69.8': 'Overlapping lesion of eye and adnexa',
|
||||||
|
'C69.9': 'Eye NOS',
|
||||||
|
'C70.0': 'Cerebral meninges',
|
||||||
|
'C70.1': 'Spinal meninges',
|
||||||
|
'C70.9': 'Meninges NOS',
|
||||||
|
'C71.0': 'Cerebrum',
|
||||||
|
'C71.1': 'Frontal lobe',
|
||||||
|
'C71.2': 'Temporal lobe',
|
||||||
|
'C71.3': 'Parietal lobe',
|
||||||
|
'C71.4': 'Occipital lobe',
|
||||||
|
'C71.5': 'Ventricle NOS',
|
||||||
|
'C71.6': 'Cerebellum, NOS',
|
||||||
|
'C71.7': 'Brain stem',
|
||||||
|
'C71.8': 'Overlapping lesion of brain',
|
||||||
|
'C71.9': 'Brain NOS',
|
||||||
|
'C72.0': 'Spinal cord',
|
||||||
|
'C72.1': 'Cauda equina',
|
||||||
|
'C72.2': 'Olfactory nerve',
|
||||||
|
'C72.3': 'Optic nerve',
|
||||||
|
'C72.4': 'Acoustic nerve',
|
||||||
|
'C72.5': 'Cranial nerve, NOS',
|
||||||
|
'C72.8': 'Overlapping lesion of brain and central nervous system',
|
||||||
|
'C72.9': 'Nervous system NOS',
|
||||||
|
'C73.9': 'Thyroid gland',
|
||||||
|
'C74.0': 'Adrenal gland cortex',
|
||||||
|
'C74.1': 'Adrenal gland medulla',
|
||||||
|
'C74.9': 'Adrenal gland NOS',
|
||||||
|
'C75.0': 'Parathyroid gland',
|
||||||
|
'C75.1': 'Pituitary gland',
|
||||||
|
'C75.2': 'Craniopharyngeal duct',
|
||||||
|
'C75.3': 'Pineal gland',
|
||||||
|
'C75.4': 'Carotid body',
|
||||||
|
'C75.5': 'Aortic body',
|
||||||
|
'C75.8': 'Overlapping lesion of endocrine glands and related structures',
|
||||||
|
'C75.9': 'Endocrine gland, NOS',
|
||||||
|
'C76.0': 'Head, face or neck NOS',
|
||||||
|
'C76.1': 'Thorax NOS',
|
||||||
|
'C76.2': 'Abdomen NOS',
|
||||||
|
'C76.3': 'Pelvis NOS',
|
||||||
|
'C76.4': 'Upper limb NOS',
|
||||||
|
'C76.5': 'Lower limb NOS',
|
||||||
|
'C76.7': 'Other illdefined sites',
|
||||||
|
'C76.8': 'Overlapping lesion of ill-defined sites',
|
||||||
|
'C77.0': 'Lymph node face, head ,neck',
|
||||||
|
'C77.1': 'Intrathoracic lymph node',
|
||||||
|
'C77.2': 'Intra-abdominal lymph nodes',
|
||||||
|
'C77.3': 'Lymph node axilla, arm',
|
||||||
|
'C77.4': 'Lymph node inguinal region, leg',
|
||||||
|
'C77.5': 'Lymph node pelvic',
|
||||||
|
'C77.8': 'Lymph nodes of multiple regions',
|
||||||
|
'C77.9': 'Lymph node NOS',
|
||||||
|
'C80.9': 'Unknown primary site'
|
||||||
|
};
|
||||||
|
|
||||||
|
let query = '', pattern = null, selection = null, all_reports = decorate(data.reports);
|
||||||
|
let debounced_pattern = debounce((/*query*/) => (pattern = query ? filter_pattern(escapeHTML(query)) : null), 200);
|
||||||
|
|
||||||
|
$: debounced_pattern(query); // argument `query` is for reactivity hinting only
|
||||||
|
|
||||||
|
function decorate(xs) {
|
||||||
|
return xs.map(x => Object.assign({ _content: escapeHTML(Object.values(x).join('\x00') + '\x00' + x.tumors.map(y => y.meta.primarySite + '\x00' + sitecodes[y.meta.primarySite]).join('\x00')), _uid: x.last5 + x.name + x.tumors.map(y => y.meta.primarySite).join(' ') }, x));
|
||||||
|
}
|
||||||
|
|
||||||
|
	// Fetch the next (older) page of reports and append it to all_reports.
	// State lives on the function object itself: loadmore.loading is a
	// re-entrancy guard (also drives the UI spinner), loadmore.limit is the
	// page size, grown by `factor` on every call.
	async function loadmore(evt, factor = 1.5) {
		if(loadmore.loading) return;
		loadmore.loading = true;
		try {
			// Grow the page size, advance data.offset by it, and request the
			// window [T-newOffset, T-(oldOffset+1)]. The `|0` truncates the
			// grown limit to an integer. Both mutations happen inside the
			// argument expression, so evaluation order matters here.
			// NOTE(review): unlike the initial load, no `fetch` is passed —
			// presumably the helper falls back to the global fetch; confirm for SSR.
			let reports = await get_api_rcrs_patients({ omega: 'T-' + (data.offset + 1), alpha: 'T-' + (data.offset += (loadmore.limit = (factor*loadmore.limit)|0)) });
			Array.prototype.push.apply(all_reports, decorate(reports));
			all_reports = all_reports; // reactivity hint
		} finally {
			loadmore.loading = false;
		}
	}
	// Initialize the function-object state used above.
	loadmore.loading = false;
	loadmore.limit = 30;
|
||||||
|
|
||||||
|
(async function loadinit(target = 16, requests = 4) {
|
||||||
|
for(let i = 0; (i < requests) && (all_reports.length < target); ++i) await loadmore();
|
||||||
|
})();
|
||||||
|
|
||||||
|
const observer = new IntersectionObserver((entries) => { if((!query) && (entries[0].isIntersecting)) loadmore(null); }, { root: null, rootMargin: '0px', threshold: 0.5 });
|
||||||
|
let bottom = null;
|
||||||
|
$: {
|
||||||
|
observer.disconnect();
|
||||||
|
if(bottom) observer.observe(bottom);
|
||||||
|
}
|
||||||
|
|
||||||
|
let reportlist;
|
||||||
|
	// Keep the report list scrolled sensibly when the selection changes.
	// With a selection: after the DOM settles, center the active <li> if it
	// is off-screen. Without one: re-anchor on the first list item currently
	// in the viewport, since dropping the split-pane layout reflows the list.
	async function scroll(selection) {
		if(selection) {
			await tick();
			const el = reportlist.querySelector('.active');
			if((el) && (!isInViewport(el, true))) el.scrollIntoView({ block: 'center' });
		} else {
			const items = reportlist.children;
			// Find the first visible item *before* awaiting, then scroll it
			// back to the top of the viewport once the layout has updated.
			for(let i = 0, el; i < items.length; ++i) if(isInViewport(el = items[i])) {
				await tick();
				el.scrollIntoView({ block: 'start' });
				break;
			}
		}
	}
|
||||||
|
$: if(reportlist) scroll(selection);
|
||||||
|
</script>
|
||||||
|
|
||||||
|
<svelte:head>
|
||||||
|
<title>RCRS</title>
|
||||||
|
</svelte:head>
|
||||||
|
|
||||||
|
{#if selection}
|
||||||
|
<div class="halfpane rightpane">
|
||||||
|
<nav class="navbar bg-body-secondary">
|
||||||
|
<div class="container-fluid">
|
||||||
|
<span class="navbar-brand">{selection.last5} {selection.name}</span>
|
||||||
|
<button type="button" class="btn btn-outline-light" on:click={() => selection = null}>❌</button>
|
||||||
|
</div>
|
||||||
|
</nav>
|
||||||
|
<div class="container-fluid"><dl class="report">{#each Object.entries(selection) as entry}{#if entry[0].charAt(0) != '_'}<dt>{entry[0]}</dt><dd>{typeof entry[1] == 'string' ? entry[1] : JSON.stringify(entry[1])}</dd>{/if}{/each}</dl></div>
|
||||||
|
</div>
|
||||||
|
{/if}
|
||||||
|
<div class={selection ? 'halfpane leftpane' : ''}>
|
||||||
|
<div class="card {selection ? '' : 'mb-3 shadow'}">
|
||||||
|
<nav class="navbar bg-body-tertiary">
|
||||||
|
<form class="container-fluid">
|
||||||
|
<div class="input-group">
|
||||||
|
<span class="input-group-text">RCRS</span>
|
||||||
|
<input type="text" class="form-control" placeholder="Filter..." bind:value={query}>
|
||||||
|
{#if query}<button type="button" class="btn btn-outline-secondary" on:click={() => query = ''}>❌</button>{/if}
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</nav>
|
||||||
|
<ul class="list-group list-group-flush" bind:this={reportlist}>
|
||||||
|
{#if pattern}
|
||||||
|
{#each all_reports as row}
|
||||||
|
{#if filter_test(pattern, row._content)}
|
||||||
|
<li class="list-group-item" class:active={(selection) && (selection._uid == row._uid)} on:click={() => selection = selection !== row ? row : null}>
|
||||||
|
<div class="singleline" style="font-weight: bold;">{row.last5} {row.name} {row.tumors.map(x => x.meta.primarySite).join(' ')}</div>
|
||||||
|
</li>
|
||||||
|
{/if}
|
||||||
|
{/each}
|
||||||
|
{:else}
|
||||||
|
{#each all_reports as row}
|
||||||
|
<li class="list-group-item" class:active={(selection) && (selection._uid == row._uid)} on:click={() => selection = selection !== row ? row : null}>
|
||||||
|
<div class="singleline" style="font-weight: bold;">{row.last5} {row.name} {row.tumors.map(x => x.meta.primarySite).join(' ')}</div>
|
||||||
|
</li>
|
||||||
|
{/each}
|
||||||
|
{/if}
|
||||||
|
<li class="list-group-item" style="padding: 0;" bind:this={bottom}>{#if loadmore.loading}<button type="button" class="btn btn-primary w-100" disabled>Loading...</button>{:else}<button type="button" class="btn btn-primary w-100" on:click={loadmore}>Load more</button>{/if}</li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<style>
|
||||||
|
:global(div.report mark) {
|
||||||
|
padding: 0;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
@media (prefers-reduced-motion: no-preference) {
|
||||||
|
:root {
|
||||||
|
scroll-behavior: auto;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.navbar {
|
||||||
|
position: sticky;
|
||||||
|
z-index: 1020;
|
||||||
|
top: 3.5rem;
|
||||||
|
}
|
||||||
|
.leftpane {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
li.active {
|
||||||
|
scroll-margin-top: 3.5rem;
|
||||||
|
}
|
||||||
|
div.singleline {
|
||||||
|
white-space: nowrap;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
}
|
||||||
|
.report {
|
||||||
|
font-family: monospace;
|
||||||
|
white-space: pre-wrap;
|
||||||
|
}
|
||||||
|
@media screen and (min-width: 720px) {
|
||||||
|
.halfpane {
|
||||||
|
position: absolute;
|
||||||
|
top: 3.5rem;
|
||||||
|
bottom: 0;
|
||||||
|
width: 50%;
|
||||||
|
overflow: auto;
|
||||||
|
}
|
||||||
|
.leftpane {
|
||||||
|
display: block;
|
||||||
|
width: 33%;
|
||||||
|
left: 0;
|
||||||
|
z-index: -1;
|
||||||
|
direction: rtl;
|
||||||
|
}
|
||||||
|
.leftpane > * {
|
||||||
|
direction: ltr;
|
||||||
|
}
|
||||||
|
.rightpane {
|
||||||
|
width: 67%;
|
||||||
|
right: 0;
|
||||||
|
box-shadow: var(--bs-box-shadow);
|
||||||
|
}
|
||||||
|
.halfpane .navbar {
|
||||||
|
top: 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
</style>
|
117
frontend/src/routes/test/+page.svelte
Normal file
117
frontend/src/routes/test/+page.svelte
Normal file
@ -0,0 +1,117 @@
|
|||||||
|
<script>
	// Random, collision-unlikely DOM id for the offcanvas panel, so multiple
	// instances on one page would not clash.
	const id_detail = Math.random().toString(36).replace('0.', 'id-');
	// Bound to the window width (via <svelte:window>) to pick the panel side.
	let innerWidth = 0;
	// Wide viewports (>768px) slide the offcanvas in from the end; narrow
	// viewports slide it up from the bottom.
	$: wide = innerWidth > 768;
</script>
|
||||||
|
|
||||||
|
<svelte:window bind:innerWidth />
|
||||||
|
|
||||||
|
<button class="btn btn-primary" type="button" data-bs-toggle="offcanvas" data-bs-target="#{id_detail}" aria-controls="{id_detail}">Enable both scrolling & backdrop</button>
|
||||||
|
|
||||||
|
<div class="offcanvas" class:offcanvas-end={wide} class:offcanvas-bottom={!wide} data-bs-scroll="true" tabindex="-1" id="{id_detail}" aria-labelledby="{id_detail}-label">
|
||||||
|
<div class="offcanvas-header">
|
||||||
|
<h5 class="offcanvas-title" id="{id_detail}-label">Backdrop with scrolling</h5>
|
||||||
|
<button type="button" class="btn-close" data-bs-dismiss="offcanvas" aria-label="Close"></button>
|
||||||
|
</div>
|
||||||
|
<div class="offcanvas-body">
|
||||||
|
<p>Try scrolling the rest of the page to see this option in action.</p>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="card mb-3 shadow">
|
||||||
|
<ul class="list-group list-group-flush">
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item">Vestibulum at eros</li>
|
||||||
|
<li class="list-group-item">Cras justo odio</li>
|
||||||
|
<li class="list-group-item">Dapibus ac facilisis in</li>
|
||||||
|
<li class="list-group-item"><a data-bs-toggle="offcanvas" href="#{id_detail}">Vestibulum at eros</a></li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<style>
|
||||||
|
/*
|
||||||
|
.card {
|
||||||
|
position: absolute;
|
||||||
|
top: 0;
|
||||||
|
bottom: 0;
|
||||||
|
margin-top: 8rem;
|
||||||
|
overflow: auto;
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
</style>
|
BIN
frontend/static/favicon.png
Normal file
BIN
frontend/static/favicon.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 1.5 KiB |
10
frontend/svelte.config.js
Normal file
10
frontend/svelte.config.js
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
// SvelteKit configuration: build the frontend as a fully static site.
import adapter from '@sveltejs/adapter-static';

/** @type {import('@sveltejs/kit').Config} */
const config = {
	kit: {
		// The fallback page turns the static build into an SPA: any route not
		// prerendered is served 200.html (the Python backend serves this file
		// for unknown paths as well).
		adapter: adapter({ fallback: '200.html' })
	}
};

export default config;
|
6
frontend/vite.config.js
Normal file
6
frontend/vite.config.js
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
// Vite configuration: the SvelteKit plugin provides dev server, HMR and the
// production build pipeline for the frontend.
import { sveltekit } from '@sveltejs/kit/vite';
import { defineConfig } from 'vite';

export default defineConfig({
	plugins: [sveltekit()]
});
|
183
kvstore.py
Normal file
183
kvstore.py
Normal file
@ -0,0 +1,183 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import re
|
||||||
|
import uuid
|
||||||
|
import time
|
||||||
|
import json
|
||||||
|
import sqlite3
|
||||||
|
|
||||||
|
from typing import Optional, Union, Sequence
|
||||||
|
|
||||||
|
class KVStore(object):
    """Abstract key-value store interface backed by an SQLite connection.

    Subclasses implement ``put``/``get``; this base class only owns the
    database handle.
    """

    def __init__(self, database: Union[str, sqlite3.Connection] = ':memory:'):
        # Accept either an already-open connection or a path/':memory:' string.
        if isinstance(database, sqlite3.Connection):
            self.db = database
        else:
            self.db = sqlite3.connect(database)

    def put(self, obj):
        """Insert an object (abstract)."""
        raise NotImplementedError

    def get(self, uid):
        """Retrieve an object by uid (abstract; the base store always misses)."""
        raise KeyError
|
||||||
|
|
||||||
|
class KVStoreView(object):
    """Hold a reference to a schema-scoped view over a KVStore.

    Delegates to the backing store, tagging writes with ``schema_name`` and
    rejecting reads whose stored schema does not match.
    """

    def __init__(self, store: 'KVStore', view_name: str, schema_name: str, attributes: Sequence[str]):
        self.store = store
        self.view_name = view_name
        self.schema_name = schema_name
        self.attributes = attributes  # previously accepted but dropped; kept for introspection

    def put(self, obj, **kw):
        """Insert an object, stamped with this view's schema."""
        return self.store.put(obj, schema=self.schema_name, **kw)

    def get(self, uid, **kw):
        """Retrieve an object, with optional default; KeyError on schema mismatch."""
        res = self.store.get(uid, **kw)
        if 'schema' in res and res['schema'] != self.schema_name:
            raise KeyError(uid)
        return res  # bug fix: the original never returned the fetched object

    def delete(self, where: str, params: Sequence):
        """Remove underlying data matched by the view-level WHERE clause."""
        return self.store.view_delete(self.view_name, where, params)
|
||||||
|
|
||||||
|
class KVStoreEAV(KVStore):
    """Key-value store backed by SQLite using the entity-attribute-value pattern.

    Each object becomes one row per attribute plus a ``__data__`` row holding
    the full JSON serialization.
    """

    def __init__(self, database: Union[str, sqlite3.Connection] = ':memory:', table_name: str = 'eavstore'):
        KVStore.__init__(self, database)
        self.table_name = table_name
        self.table_name_q = quote_identifier(table_name)
        # Fixed column names of the EAV triple table.
        self.entity_column_name = 'uid'
        self.attribute_column_name = '__attribute__'
        self.value_column_name = '__value__'
        self.data_attribute_name = '__data__'
        self.db.execute(f'CREATE TABLE IF NOT EXISTS {self.table_name_q} ({self.entity_column_name} TEXT, {self.attribute_column_name} TEXT, {self.value_column_name} INTEGER)')
        self.db.execute(f'CREATE INDEX IF NOT EXISTS {quote_identifier(f"idx_{table_name}_entity")} on {self.table_name_q} ({self.entity_column_name})')
        # One value per (entity, attribute): put() relies on delete-then-insert.
        self.db.execute(f'CREATE UNIQUE INDEX IF NOT EXISTS {quote_identifier(f"idx_{table_name}_attribute")} on {self.table_name_q} ({self.entity_column_name}, {self.attribute_column_name})')
        self.db.execute(f'CREATE UNIQUE INDEX IF NOT EXISTS {quote_identifier(f"idx_{table_name}_value")} on {self.table_name_q} ({self.entity_column_name}, {self.attribute_column_name}, {self.value_column_name})')

    def put(self, obj, **kw):
        """Insert an object, replacing any existing rows with the same uid.

        Returns the stored object (including generated ``uid``/``ts``).
        """
        # Consistency fix: KVStoreJSON.put copies before mutating; do the same
        # here so the caller's dict is never modified in place.
        obj = obj.copy()
        if 'uid' in obj:
            # Replacing an existing entity: clear its old attribute rows first.
            self.db.execute(f'DELETE FROM {self.table_name_q} WHERE {self.entity_column_name}=?', (obj['uid'],))
        else:
            obj['uid'] = uuid.uuid4().hex
        if 'ts' not in obj:
            obj['ts'] = time.time()
        if 'schema' in kw:
            obj['schema'] = kw['schema']
        uid = obj['uid']
        self.db.executemany(f'INSERT INTO {self.table_name_q} ({self.entity_column_name}, {self.attribute_column_name}, {self.value_column_name}) VALUES (?, ?, ?)', ((uid, k, v) for k, v in obj.items() if k != 'uid'))
        if self.data_attribute_name not in obj:
            # Store the full JSON serialization alongside the attribute rows.
            self.db.execute(f'INSERT INTO {self.table_name_q} ({self.entity_column_name}, {self.attribute_column_name}, {self.value_column_name}) VALUES (?, ?, ?)', (uid, self.data_attribute_name, json.dumps(obj, default=str, separators=(',', ':'))))
        return obj

    def get(self, uid, **kw):
        """Retrieve an object, with optional ``default`` keyword; KeyError otherwise."""
        obj = {k: v for k, v in self.db.execute(f'SELECT {self.attribute_column_name}, {self.value_column_name} FROM {self.table_name_q} WHERE {self.entity_column_name}=?', (uid,))}
        if len(obj) > 0:
            obj['uid'] = uid
            return obj
        elif 'default' in kw:
            return kw['default']
        else:
            raise KeyError(uid)

    def ensure_view(self, view_name: str, schema_name: str, attributes: Sequence[str], clear: bool = False) -> KVStoreView:
        """Create a view over the key-value schema, optionally removing existing underlying data."""
        self.db.execute(f'DROP VIEW IF EXISTS {quote_identifier(view_name)}')
        self.db.execute(eav_query_ensure_view(view_name, self.table_name, self.entity_column_name, self.attribute_column_name, self.value_column_name, schema_name, attributes))
        if clear:
            self.db.execute(f'DELETE FROM {self.table_name_q} WHERE {self.entity_column_name} IN (SELECT {self.entity_column_name} FROM {quote_identifier(view_name)} WHERE schema=?)', (schema_name,))
        return KVStoreView(self, view_name, schema_name, attributes)

    def view_delete(self, view_name: str, where: str, params: Sequence):
        """Remove underlying data for entities matched by ``where`` in the view.

        NOTE(review): ``where`` is concatenated into the SQL — callers must
        pass a trusted clause and bind values through ``params``.
        """
        self.db.execute(f'DELETE FROM {self.table_name_q} WHERE {self.entity_column_name} IN (SELECT {self.entity_column_name} FROM {quote_identifier(view_name)} WHERE ' + where + ')', params)
|
||||||
|
|
||||||
|
class KVStoreJSON(KVStore):
    """Key-value store backed by SQLite with one JSON document per row."""

    def __init__(self, database: Union[str, sqlite3.Connection] = ':memory:', table_name: str = 'jsonstore'):
        KVStore.__init__(self, database)
        self.table_name = table_name
        self.table_name_q = quote_identifier(table_name)
        self.uid_column_name = 'uid'
        self.data_column_name = '__data__'
        self.db.execute(f'CREATE TABLE IF NOT EXISTS {self.table_name_q} ({self.uid_column_name} TEXT, {self.data_column_name} JSON)')
        self.db.execute(f'CREATE UNIQUE INDEX IF NOT EXISTS {quote_identifier(f"idx_{table_name}_uid")} ON {self.table_name_q} ({self.uid_column_name})')
        # Fix: JSON paths are SQL *string literals* and belong in single quotes.
        # The original double-quoted "$.ts"/"$.schema" only worked through
        # SQLite's double-quoted-string misfeature (disabled by SQLITE_DQS=0).
        self.db.execute(f"CREATE INDEX IF NOT EXISTS {quote_identifier(f'idx_{table_name}_ts')} ON {self.table_name_q} (JSON_EXTRACT({self.data_column_name}, '$.ts'))")
        self.db.execute(f"CREATE INDEX IF NOT EXISTS {quote_identifier(f'idx_{table_name}_schema')} ON {self.table_name_q} (JSON_EXTRACT({self.data_column_name}, '$.schema'))")

    def put(self, obj, **kw):
        """Insert an object (copied, never mutated); returns the stored object."""
        obj = obj.copy()
        if 'uid' not in obj:
            obj['uid'] = uuid.uuid4().hex
        if 'ts' not in obj:
            obj['ts'] = time.time()
        if 'schema' in kw:
            obj['schema'] = kw['schema']
        self.db.execute(f'INSERT INTO {self.table_name_q} (uid, {self.data_column_name}) VALUES (?, JSON(?))', (obj['uid'], json.dumps(obj, default=str, separators=(',', ':'))))
        return obj

    def get(self, uid, **kw):
        """Retrieve an object, with optional ``default`` keyword; KeyError otherwise."""
        for row in self.db.execute(f'SELECT {self.data_column_name} FROM {self.table_name_q} WHERE uid=? LIMIT 1', (uid,)):
            return json.loads(row[0])
        if 'default' in kw:
            return kw['default']
        else:
            raise KeyError(uid)

    def ensure_view(self, view_name: str, schema_name: str, attributes: Sequence[str], clear: bool = False) -> KVStoreView:
        """Create a view over the key-value schema, optionally removing existing underlying data."""
        self.db.execute(f'DROP VIEW IF EXISTS {quote_identifier(view_name)}')
        self.db.execute(json_query_ensure_view(view_name, self.table_name, self.uid_column_name, self.data_column_name, schema_name, attributes))
        if clear:
            # Same single-quote fix as in __init__ for the '$.schema' path.
            self.db.execute(f"DELETE FROM {self.table_name_q} WHERE JSON_EXTRACT({self.data_column_name}, '$.schema')=?", (schema_name,))
        return KVStoreView(self, view_name, schema_name, attributes)

    def view_delete(self, view_name: str, where: str, params: Sequence):
        """Remove underlying data for rows matched by ``where`` in the view.

        NOTE(review): ``where`` is concatenated into the SQL — callers must
        pass a trusted clause and bind values through ``params``.
        """
        self.db.execute(f'DELETE FROM {self.table_name_q} WHERE {self.uid_column_name} IN (SELECT {self.uid_column_name} FROM {quote_identifier(view_name)} WHERE ' + where + ')', params)
|
||||||
|
|
||||||
|
def quote_string(s: str) -> str:
    """Quote *s* as an SQLite string literal.

    Fix: use single quotes. SQLite reserves double quotes for identifiers and
    only accepts double-quoted string literals via a legacy misfeature, which
    can silently bind to a column of the same name and can be compiled out
    (SQLITE_DQS=0). Embedded quotes are escaped by doubling.
    """
    return "'" + s.replace("'", "''") + "'"
|
||||||
|
|
||||||
|
def quote_identifier(s: str) -> str:
    """Quote *s* for use as an SQLite identifier.

    Plain alphanumeric names (letter first) pass through untouched; anything
    else is wrapped in double quotes with embedded quotes doubled.
    """
    if not re.match(r'^[A-Za-z][0-9A-Za-z_]*$', s):
        return '"' + s.replace('"', '""') + '"'
    return s
|
||||||
|
|
||||||
|
def eav_query_pivot(table_name: str, entity_column_name: str, attribute_column_name: str, value_column_name: str, attributes: Sequence[str]) -> str:
    """Build a SELECT that pivots EAV triples into one row per entity.

    Each attribute becomes a MAX(CASE WHEN ...) column; rows are grouped by
    the entity column, which is excluded from the pivoted attribute list.
    """
    if entity_column_name in attributes:
        attributes = list(attributes)
        attributes.remove(entity_column_name)
    parts = [f'SELECT {quote_identifier(entity_column_name)}']
    parts.extend(
        f', MAX(CASE WHEN {quote_identifier(attribute_column_name)}={quote_string(attr)} THEN {quote_identifier(value_column_name)} END) {quote_identifier(attr)}'
        for attr in attributes
    )
    parts.append(f'FROM {quote_identifier(table_name)} GROUP BY {quote_identifier(entity_column_name)}')
    return '\n'.join(parts)
|
||||||
|
|
||||||
|
def eav_query_ensure_view(view_name: str, table_name: str, entity_column_name: str, attribute_column_name: str, value_column_name: str, schema_name: str, attributes: Sequence[str]) -> str:
    """Generate SQL creating a temporary view over the pivoted EAV data.

    The 'schema' attribute is always included so the view can be filtered to
    rows of the requested schema via the trailing HAVING clause.
    """
    if 'schema' not in attributes:
        attributes = tuple(attributes) + ('schema',)
    if entity_column_name in attributes:
        attributes = list(attributes)
        attributes.remove(entity_column_name)
    columns = ', '.join(quote_identifier(attr) for attr in attributes)
    select = eav_query_pivot(table_name, entity_column_name, attribute_column_name, value_column_name, attributes)
    return (
        f'CREATE TEMPORARY VIEW IF NOT EXISTS {quote_identifier(view_name)} '
        f'({quote_identifier(entity_column_name)}, {columns}) AS\n'
        f'{select}\n'
        f'HAVING schema={quote_string(schema_name)}'
    )
|
||||||
|
|
||||||
|
def json_query_pivot(table_name: str, uid_column_name: str, data_column_name: str, attributes: Sequence[str]) -> str:
    """Expand JSON rows into traditional rows (one column per attribute).

    Fix: the JSON path passed to JSON_EXTRACT is an SQL *string literal*, so
    it is now quoted with quote_string. The original used quote_identifier,
    producing a double-quoted token that SQLite only accepted through its
    double-quoted-string misfeature.
    """
    if uid_column_name in attributes:
        attributes = list(attributes)
        attributes.remove(uid_column_name)
    columns = ', '.join(
        f'JSON_EXTRACT({quote_identifier(data_column_name)}, {quote_string("$." + attr)}) AS {quote_identifier(attr)}'
        for attr in attributes
    )
    return f'SELECT {quote_identifier(uid_column_name)}, {columns} FROM {quote_identifier(table_name)}'
|
||||||
|
|
||||||
|
def json_query_ensure_view(view_name: str, table_name: str, uid_column_name: str, data_column_name: str, schema_name: str, attributes: Sequence[str]) -> str:
    """Generate SQL creating a temporary view over the expanded JSON data.

    The 'schema' attribute is always included; the WHERE clause restricts the
    view to documents of the requested schema. Fix: the '$.schema' JSON path
    is a string literal, so it is quoted with quote_string instead of
    quote_identifier (which produced a double-quoted token relying on
    SQLite's DQS misfeature).
    """
    if 'schema' not in attributes:
        attributes = tuple(attributes) + ('schema',)
    if uid_column_name in attributes:
        attributes = list(attributes)
        attributes.remove(uid_column_name)
    columns = ', '.join(quote_identifier(attr) for attr in attributes)
    select = json_query_pivot(table_name, uid_column_name, data_column_name, attributes)
    return (
        f'CREATE TEMPORARY VIEW IF NOT EXISTS {quote_identifier(view_name)} '
        f'({quote_identifier(uid_column_name)}, {columns}) AS\n'
        f'{select}\n'
        f"WHERE JSON_EXTRACT({quote_identifier(data_column_name)}, {quote_string('$.schema')})={quote_string(schema_name)}"
    )
|
248
main.py
Normal file
248
main.py
Normal file
@ -0,0 +1,248 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
import getpass
|
||||||
|
import asyncio
|
||||||
|
import configparser
|
||||||
|
|
||||||
|
import autoproc
|
||||||
|
|
||||||
|
import ext_session
|
||||||
|
import ext_discovery
|
||||||
|
import ext_scheduling
|
||||||
|
import ext_patient
|
||||||
|
import ext_measurement
|
||||||
|
import ext_lab
|
||||||
|
import ext_note
|
||||||
|
import ext_order
|
||||||
|
import ext_rcrs
|
||||||
|
|
||||||
|
from typing import Optional, Union, Generator
|
||||||
|
|
||||||
|
import logging
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
class Config(configparser.ConfigParser):
    """ConfigParser over a fixed list of files, with write-back support.

    ``set`` persists to the *last* file in the list, which therefore acts as
    the user-writable override layer.
    """

    def __init__(self, *filenames, **kw):
        configparser.ConfigParser.__init__(self, **kw)
        self.filenames = filenames
        self.reread()

    def reread(self, *args, **kw) -> 'Config':
        """Re-read all config files in order (later files win); returns self.

        Fix: the return annotation said ``list`` but the method chains by
        returning ``self``.
        """
        configparser.ConfigParser.read(self, self.filenames, *args, **kw)
        return self

    def set(self, section: str, key: str, value: str) -> 'Config':
        """Set a value and persist it to the last (override) file; returns self."""
        # Rewrite only the override file, preserving whatever else it holds.
        target = configparser.ConfigParser()
        target.read(self.filenames[-1])
        if not target.has_section(section):
            target.add_section(section)
        target[section][key] = value
        with open(self.filenames[-1], 'w+') as f:
            target.write(f)
        # Pick the change back up into this (merged) parser.
        configparser.ConfigParser.read(self, self.filenames[-1])
        return self
|
||||||
|
|
||||||
|
async def stdin_reader(opts: dict):
    """Async-yield lines of user input from stdin, stripped of line endings.

    Fix: the original annotated this as ``Generator[bytes, None, None]``,
    but it is an *async generator* and yields ``str`` (both
    ``sys.stdin.readline`` and ``getpass.getpass`` return text); the wrong
    annotation has been removed.

    Reads run in the default executor so the event loop is never blocked.
    ``opts['stdin.echo']`` (default True) selects between the normal line
    editor and hidden input. Terminates quietly on connection reset or
    Ctrl-C.
    """
    try:
        loop = asyncio.get_event_loop()
        while True:
            if opts.get('stdin.echo', True):  # visible input with line editing
                line = await loop.run_in_executor(None, sys.stdin.readline)
                yield line.rstrip('\r\n')
            else:  # input hiding characters (passwords)
                yield await loop.run_in_executor(None, getpass.getpass, '')
    except ConnectionResetError:
        logger.info('ConnectionResetError: stdin_reader')
    except KeyboardInterrupt:
        logger.info('KeyboardInterrupt: stdin_reader')
|
||||||
|
|
||||||
|
async def log_writer(proc, buffer):
    """Copy every message published by *proc* into *buffer*, forever.

    Subscribes to the process's output queue and appends each chunk as it
    arrives; runs until the task is cancelled.
    """
    with autoproc.subscribe(proc) as pipequeue:
        while True:
            chunk = await pipequeue.get()
            buffer.write(chunk)
|
||||||
|
|
||||||
|
import functools
import pickle
import sqlite3
import time

def memoize(database: Union[str, sqlite3.Connection] = ':memory:', table: str = 'tempmemo', prefix: Optional[str] = None, ttl: float = 86400, persist: bool = False, cast=None):
    """Decorator factory caching function results in an SQLite table.

    Works for both sync and async functions. Results are pickled; ``cast``
    (if given) is applied after unpickling on cache *hits* only — a cache
    miss returns the raw result (preserved quirk of the original). Entries
    expire after ``ttl`` seconds; ``persist=True`` uses a regular table
    instead of a TEMPORARY one.

    NOTE(review): ``table`` and ``prefix`` are interpolated into SQL
    unquoted — callers must pass trusted identifiers only.
    """
    db = database if isinstance(database, sqlite3.Connection) else sqlite3.connect(database)
    db.execute(f'CREATE {"" if persist else "TEMPORARY "}TABLE IF NOT EXISTS {table} (uid TEXT PRIMARY KEY, result BLOB, expiry FLOAT)')
    db.execute(f'CREATE UNIQUE INDEX IF NOT EXISTS idx_{table}_uid ON {table} (uid)')
    db.execute(f'CREATE INDEX IF NOT EXISTS idx_{table}_expiry ON {table} (expiry)')

    _MISSING = object()  # sentinel: None is a legitimate cached result

    def _key(name, args, kw) -> bytes:
        # Cache key from function identity plus the repr of the call arguments.
        return f'{prefix or name}:{repr(args)}{repr(kw)}'.encode('utf-8')

    def _load(uid):
        """Return the cached, unexpired value for uid (cast applied), or _MISSING."""
        for row in db.execute(f'SELECT result FROM {table} WHERE uid=? AND expiry>? LIMIT 1', (uid, time.time())):
            value = pickle.loads(row[0])
            return cast(value) if cast else value
        return _MISSING

    def _store(uid, result):
        with db:  # commit the upsert
            db.execute(f'INSERT OR REPLACE INTO {table} (uid, result, expiry) VALUES (?, ?, ?)', (uid, pickle.dumps(result), time.time() + ttl))

    def memoizer(func):
        # The sync/async bodies were duplicated in the original; both now
        # share _key/_load/_store. functools.wraps preserves __name__ (as the
        # original manual copy did) plus docstring and module.
        if asyncio.iscoroutinefunction(func):
            @functools.wraps(func)
            async def wrapper(*args, **kw):
                uid = _key(func.__name__, args, kw)
                cached = _load(uid)
                if cached is not _MISSING:
                    return cached
                result = await func(*args, **kw)
                _store(uid, result)
                return result
        else:
            @functools.wraps(func)
            def wrapper(*args, **kw):
                uid = _key(func.__name__, args, kw)
                cached = _load(uid)
                if cached is not _MISSING:
                    return cached
                result = func(*args, **kw)
                _store(uid, result)
                return result
        return wrapper
    return memoizer
|
||||||
|
|
||||||
|
def application(proc, config):
    """Build the Quart app exposing the VistA session *proc* as a JSON API.

    Routes are thin wrappers over the ext_* command modules; most are wrapped
    in @memoize so repeated queries hit the local SQLite cache in cache.db.
    The remaining routes serve the SvelteKit build as a single-page app.
    """
    from quart import Quart, request, send_from_directory
    app = Quart(__name__)

    # Shared on-disk cache used by the @memoize decorators below.
    db = sqlite3.connect('./cache.db')

    # In-memory capture of the subprocess's output, served at /api/log.txt.
    from io import StringIO
    buffer = StringIO()
    proc.create_task(log_writer(proc, buffer), name='@task:log')

    @app.route('/api/clinic/list')
    @memoize(db, table='memo', prefix='clinics', ttl=30*86400, persist=True)  # clinic list changes rarely: persistent 30-day cache
    async def http_api_clinic_list():
        return [item async for item in ext_discovery.cmd_listclinics(proc)]

    @app.route('/api/appointments/<clinics>/<date>')
    @memoize(db)
    async def http_api_appointments(clinics, date):
        # '|' stands in for '/' in path segments; translate back before use.
        clinics = clinics.strip()
        return [item async for item in ext_scheduling.cmd_appointments(proc, clinics=clinics.replace('|', '/'), date=date.replace('|', '/'))] if len(clinics) > 0 else []

    @app.route('/api/lookup/<query>')
    @memoize(db)
    async def http_api_lookup(query):
        # Strip '^' (VistA field separator) and collapse whitespace; require
        # at least 2 characters before hitting the remote session.
        query = re.sub(r'\s+', ' ', query.replace('^', '').strip())
        return (await ext_patient.cmd_lookup_patient(proc, query)) if len(query) > 1 else []

    @app.route('/api/lookup/<query>/<int:ordinal>')
    @app.route('/api/lookup/<query>/<int:ordinal>/<force>')
    @memoize(db, cast=tuple)  # cached responses must come back as a (body, status, headers) tuple
    async def http_api_lookup_ordinal(query, ordinal, force=False):
        query = re.sub(r'\s+', ' ', query.replace('^', '').strip())
        # NOTE(review): `not not force` treats ANY non-empty path segment
        # (including the literal "false") as truthy — confirm intended.
        return (await ext_patient.cmd_lookup_patient_ordinal(proc, query, ordinal, not not force)) if len(query) > 1 else '', 200, { 'Content-type': 'text/plain' }

    @app.route('/api/measurements/<mrn>/<alpha>/<omega>')
    @memoize(db)
    async def http_api_measurements(mrn, alpha, omega):
        # alpha/omega are VistA-format date bounds parsed by util.vista_strptime.
        import util
        return [item async for item in ext_measurement.cmd_entries(proc, mrn, util.vista_strptime(alpha), util.vista_strptime(omega))]

    @app.route('/api/labs/<mrn>/<alpha>/<omega>')
    @memoize(db)
    async def http_api_labs(mrn, alpha, omega):
        import util
        return [item async for item in ext_lab.cmd_reports(proc, mrn, util.vista_strptime(alpha), util.vista_strptime(omega))]

    @app.route('/api/notes/<mrn>/<alpha>/<omega>')
    @memoize(db)
    async def http_api_notes(mrn, alpha, omega):
        import util
        return [item async for item in ext_note.cmd_reports(proc, mrn, util.vista_strptime(alpha), util.vista_strptime(omega))]

    @app.route('/api/orders/<mrn>/<alpha>/<omega>')
    @memoize(db)
    async def http_api_orders(mrn, alpha, omega):
        import util
        return [item async for item in ext_order.cmd_entries(proc, mrn, util.vista_strptime(alpha), util.vista_strptime(omega))]

    @app.route('/api/rcrs/patients/<alpha>/<omega>')
    @memoize(db)
    async def http_api_rcrs_patients(alpha, omega):
        import util
        return [item async for item in ext_rcrs.cmd_patients(proc, util.vista_strptime(alpha), util.vista_strptime(omega))]

    @app.route('/api/rcrs/tumors/<alpha>/<omega>')
    @memoize(db)
    async def http_api_rcrs_tumors(alpha, omega):
        import util
        return [item async for item in ext_rcrs.cmd_tumors(proc, util.vista_strptime(alpha), util.vista_strptime(omega))]

    @app.route('/api/log.txt')
    async def http_api_log():
        # Everything log_writer has captured from the subprocess so far.
        return buffer.getvalue(), 200, { 'Content-type': 'text/plain' }

    @app.route('/api/config/<section>/<key>', methods=('GET',))
    async def get_api_config_value(section, key):
        # Missing section/key is reported as an empty value, not a 404.
        try:
            return config[section][key] or '', 200, { 'Content-type': 'text/plain' }
        except KeyError:
            return '', 200, { 'Content-type': 'text/plain' }

    @app.route('/api/config/<section>/<key>', methods=('PUT',))
    async def put_api_config_value(section, key):
        # Request body is the raw value; Config.set persists it to disk.
        config.set(section, key, (await request.get_data()).decode('utf-8'))
        return {}

    @app.route('/favicon.png')
    async def http_favicon():
        return await send_from_directory('./frontend/build', 'favicon.png')

    @app.route('/_app/<path:path>')
    async def http_static(path):
        # SvelteKit's hashed build assets.
        return await send_from_directory('./frontend/build/_app', path)

    @app.route('/')
    @app.route('/<path:path>')
    async def http_spa(path='index.html'):
        # SPA fallback: every unknown path serves the adapter-static fallback page.
        return await send_from_directory('./frontend/build', '200.html')

    return app
|
||||||
|
|
||||||
|
async def frontend_build():
    """Run ``npm run build -- --watch`` for the frontend, restarting it if it exits.

    Runs until the task is cancelled; cancellation tears the watcher (and its
    child processes) down in the ``finally`` block.
    """
    import shutil
    proc = None
    try:
        while True:
            # shutil.which resolves npm's full path (needed on Windows, where
            # npm is a .cmd shim) — TODO confirm behavior on other platforms.
            proc = await asyncio.create_subprocess_exec(shutil.which('npm'), 'run', '--prefix', 'frontend', 'build', '--', '--watch', stdin=None)
            await proc.wait()
            logger.warning(f'Frontend builder exited: {proc.returncode}')
    except asyncio.exceptions.CancelledError:
        pass  # normal shutdown path: cancelled by the caller
    finally:
        if proc is not None and proc.returncode is None:
            logger.warning('Terminating frontend builder...')
            # taskkill kills the whole process tree; a plain terminate() would
            # leave npm's children running. NOTE(review): Windows-only command —
            # confirm this code never runs on POSIX hosts.
            await (await asyncio.create_subprocess_exec('taskkill', '/pid', str(proc.pid), '/t', '/f', stdin=None)).wait() # proc.terminate()
            await proc.wait()
            proc._transport.close() # workaround, see https://github.com/python/cpython/issues/88050
|
||||||
|
|
||||||
|
def get_port() -> int:
    """Return an OS-assigned free TCP port on localhost.

    Note: the port is released before returning, so another process could in
    principle grab it before our server binds it (inherent race with this
    technique).
    """
    import socket
    # Context manager guarantees the socket is closed even if bind() raises
    # (the original leaked the socket on error).
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.bind(('localhost', 0))
        return sock.getsockname()[1]
|
||||||
|
|
||||||
|
async def main_async(opts: dict, dev=True):
    """Top-level entry: build the frontend (dev mode), open the SSH session,
    serve the web app, and forward interactive stdin to the remote session.

    *opts* is forwarded to stdin_reader (e.g. 'stdin.echo').
    """
    import webbrowser, os
    config = Config('./config.ini')
    if dev:
        # Start the npm watcher in the background and wait until its first
        # build has produced the SPA fallback page.
        builder = asyncio.create_task(frontend_build())
        await asyncio.sleep(1)
        while not os.path.isfile('./frontend/build/200.html'):
            logger.warning('Waiting for frontend build...')
            await asyncio.sleep(5)
    try:
        # Instrumented SSH subprocess talking to the remote VistA host;
        # stdin_endl=b'\r' matches the terminal-style line endings it expects.
        proc = await autoproc.create_instrumented_subprocess_exec('ssh', '-T', '-oStrictHostKeyChecking=no', 'nopvista@vista.northport.med.va.gov', stdin=asyncio.subprocess.PIPE, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT, stdin_endl=b'\r')
        asyncio.create_task(proc.wait())
        if await ext_session.task_smartcard(proc, config): #proc.create_task(ext_session.task_smartcard(proc, config), name='@task:smartcard')
            # Smartcard login succeeded: serve the app on a free port and open
            # a browser tab pointing at it.
            port = get_port()
            asyncio.create_task(application(proc, config).run_task(port=port))
            webbrowser.open(f'http://localhost:{port}/')
            # Forward interactive input lines to the remote session until the
            # session dies or the reader yields None.
            async for data in stdin_reader(opts):
                #data = input_filter(data) # filter input, possibly spawning tasks
                if proc.returncode is None and data is not None:
                    proc.sendline(data)
                    #await pipe.drain()
                else:
                    return
    finally:
        if dev:
            # Stop the npm watcher; frontend_build handles CancelledError itself.
            builder.cancel()
            await builder
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # Run with default options; dev mode (frontend watcher) is the default.
    asyncio.run(main_async({}))
|
1
requirements.txt
Normal file
1
requirements.txt
Normal file
@ -0,0 +1 @@
|
|||||||
|
quart
|
121
util.py
Normal file
121
util.py
Normal file
@ -0,0 +1,121 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import re
|
||||||
|
import datetime
|
||||||
|
import itertools
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
from typing import Any, Union, AsyncGenerator, Iterable
|
||||||
|
|
||||||
|
class BucketCache(object):
    """Cache data in buckets, with customizable bucket keys and fetch callback.

    Subclasses provide ``bucketkey`` (value -> key), ``bucket`` (key -> int
    index), ``unbucket`` (index -> key) and async ``fetch``; this base class
    handles storage and range retrieval with gap-filling.
    """

    def __init__(self, data: dict = None):
        # defaultdict(list): touching a bucket index creates an empty bucket.
        self.data = defaultdict(list, {} if data is None else data)

    def add(self, value) -> None:
        """Add one value to its bucket."""
        # (Dropped the original's unused walrus binding of the bucket index.)
        self.data[self.bucket(self.bucketkey(value))].append(value)

    def update(self, iterable: Iterable) -> None:
        """Add multiple values."""
        for value in iterable:
            # Bug fix: the original called self.add(iterable) — appending the
            # whole iterable once per element instead of each value.
            self.add(value)

    async def retrieve(self, key_start, key_stop, ctx=None) -> AsyncGenerator[Any, None]:
        """Yield cached values with keys in [key_start, key_stop], fetching
        missing buckets first.

        Missing bucket indices are grouped into contiguous runs so ``fetch``
        is called once per gap rather than once per bucket.
        """
        bkt_start = self.bucket(key_start)
        bkt_stop = self.bucket(key_stop)
        missing = (idx for idx in range(bkt_start, bkt_stop + 1) if idx not in self.data)
        # Consecutive indices share a constant (index - counter) value, so
        # groupby splits `missing` into contiguous runs.
        for k, g in itertools.groupby(missing, key=(lambda n, c=itertools.count(): n - next(c))):
            async for value in self.fetch(self.unbucket((g := tuple(g))[0]), self.unbucket(g[-1]), ctx):
                # Only cache values that fall inside the requested bucket range.
                if bkt_start <= self.bucket(self.bucketkey(value)) <= bkt_stop:
                    self.add(value)
        for idx in range(bkt_start, bkt_stop + 1): # accessing self.data[idx] creates an empty bucket if absent
            for value in self.data[idx]:
                if key_start <= self.bucketkey(value) <= key_stop:
                    yield value

    @staticmethod
    def bucketkey(value):
        """Get bucket key from value."""
        raise NotImplementedError

    @staticmethod
    def bucket(key) -> int:
        """Get bucket index from bucket key."""
        raise NotImplementedError

    @staticmethod
    def unbucket(idx: int):
        """Get a representative bucket key back from a bucket index."""
        raise NotImplementedError

    @staticmethod
    async def fetch(start, stop, ctx=None) -> AsyncGenerator[Any, None]:
        """Async-yield fresh values for keys in [start, stop]."""
        raise NotImplementedError
|
||||||
|
|
||||||
|
class TimeSeriesCache(BucketCache):
    """Cache time series data in daily buckets

    Bucket index is the number of days since 1970-01-01 (the Unix epoch date).
    """

    @staticmethod
    def bucket(key: Union[datetime.date, datetime.datetime]) -> int:
        # datetimes are truncated to their calendar date before bucketing
        return ((key.date() if isinstance(key, datetime.datetime) else key) - datetime.date(1970, 1, 1)).days

    @staticmethod
    def unbucket(idx: int) -> datetime.date:
        # Inverse of bucket(): the calendar date covered by bucket idx
        return datetime.date(1970, 1, 1) + datetime.timedelta(days=idx)
|
||||||
|
|
||||||
|
re_dt_fileman = r'(?P<dt_fileman>(\d{3})(\d{2})(\d{2})\.(\d{2})(\d{2})(\d{2}))' # George Timson's format
|
||||||
|
re_dt_today = r'(?P<dt_today>T)' # today
|
||||||
|
re_dt_now = r'(?P<dt_now>N)' # now
|
||||||
|
re_dt_mdy = r'(?P<dt_mdy>(\d{1,2})[^\w@?]+(\d{1,2})[^\w@?]+(\d{2}|\d{4})\s*)' # m/d/yy, m/d/yyyy
|
||||||
|
re_dt_ymd = r'(?P<dt_ymd>(\d{4})[^\w@?]+(\d{1,2})[^\w@?]+(\d{1,2})\s*)' # yyyy/m/d
|
||||||
|
re_dt_yyyymmdd = r'(?P<dt_yyyymmdd>(\d{4})(\d{2})(\d{2}))' # yyyymmdd
|
||||||
|
re_dt_Mdy = r'(?P<dt_Mdy>([A-Z]{3,})[^\w@?]+(\d{1,2})[^\w@?]+(\d{2}|\d{4})\s*)' # M/d/yy, M/d/yyyy
|
||||||
|
re_dt_dMy = r'(?P<dt_dMy>((\d{1,2})[^\w@?]+[A-Z]{3,})[^\w@?]+(\d{2}|\d{4})\s*)' # d/M/yy, d/M/yyyy
|
||||||
|
re_dt_md = r'(?P<dt_md>(\d{1,2})[^\w@?]+(\d{1,2})\s*)' # m/d
|
||||||
|
re_dt_offset = r'(?P<offset>([-+]\d+)(H|W|M)?)' # +#U
|
||||||
|
re_dt_time = r'(?:@?(?P<time>(\d{1,2}):?(\d{1,2})))' # time
|
||||||
|
re_dt_ext = r'(?P<ext>[<>~])' # (nonstandard extension)
|
||||||
|
rx_dt = re.compile(f'^{re_dt_fileman}|(?:(?:{re_dt_today}|{re_dt_now}|{re_dt_mdy}|{re_dt_ymd}|{re_dt_yyyymmdd}|{re_dt_Mdy}|{re_dt_dMy}|{re_dt_md})?{re_dt_offset}?{re_dt_time}?{re_dt_ext}?)$', re.IGNORECASE)
|
||||||
|
def vista_strptime(s: str) -> datetime.datetime:
    """Parse VistA-style datetime strings into Python datetime.datetime objects

    Accepts FileMan/Timson stamps (YYYMMDD.HHMMSS, year offset from 1700),
    T (today), N (now), several calendar spellings (m/d/yy, yyyy/m/d,
    yyyymmdd, Month d yy, d Month yy, m/d), an optional signed offset
    (+/-n with H/W/M unit suffix, default days), an optional @hh:mm time,
    and the nonstandard </>/~ extensions (start of day, end of day,
    minus one microsecond).

    Raises ValueError on unparseable input, a time combined with N, a time
    or T combined with an H offset, or a resulting year before 1800.
    """
    if m := rx_dt.match(s.strip().lower()):
        m = m.groupdict()
        if m['dt_fileman']:
            m1 = re.match(re_dt_fileman, m['dt_fileman'])
            # Timson stamp: groups 2-4 are year-1700/month/day, 5-7 hh/mm/ss.
            # fix: the time digits are mandatory in the pattern but were
            # previously discarded, truncating every FileMan stamp to midnight.
            return datetime.datetime(year=1700 + int(m1.group(2)), month=int(m1.group(3)), day=int(m1.group(4)), hour=int(m1.group(5)), minute=int(m1.group(6)), second=int(m1.group(7)))
        # All other spellings start from midnight today and patch fields in.
        date = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
        if m['dt_today']: pass
        elif m['dt_now']: date = datetime.datetime.now()
        elif m['dt_mdy']: date = date.replace(month=int((m1 := re.match(re_dt_mdy, m['dt_mdy'], flags=re.I)).group(2)), day=int(m1.group(3)), year=vista_strptime_year(int(m1.group(4)), date))
        elif m['dt_ymd']: date = date.replace(year=int((m1 := re.match(re_dt_ymd, m['dt_ymd'], flags=re.I)).group(2)), month=int(m1.group(3)), day=int(m1.group(4)))
        elif m['dt_yyyymmdd']: date = date.replace(year=int((m1 := re.match(re_dt_yyyymmdd, m['dt_yyyymmdd'], flags=re.I)).group(2)), month=int(m1.group(3)), day=int(m1.group(4)))
        elif m['dt_Mdy']: date = date.replace(month=vista_strptime_month((m1 := re.match(re_dt_Mdy, m['dt_Mdy'], flags=re.I)).group(2)), day=int(m1.group(3)), year=vista_strptime_year(int(m1.group(4)), date))
        elif m['dt_dMy']: date = date.replace(day=int((m1 := re.match(re_dt_dMy, m['dt_dMy'], flags=re.I)).group(2)), month=vista_strptime_month(m1.group(3)), year=vista_strptime_year(int(m1.group(4)), date))
        elif m['dt_md']: date = date.replace(month=int((m1 := re.match(re_dt_md, m['dt_md'], flags=re.I)).group(2)), day=int(m1.group(3)))
        # (dropped an unused local: time = datetime.time())
        if m['time']:
            if m['dt_now']:
                raise ValueError('cannot specify time with N or H offset')
            m1 = re.match(re_dt_time, m['time'], flags=re.I)
            date = date.replace(hour=int(m1.group(2)), minute=int(m1.group(3)))
        if m['offset']:
            m1 = re.match(re_dt_offset, m['offset'], flags=re.I)
            # H offsets imply a time and so conflict with an explicit time or T.
            if (offset_unit := m1.group(3)) == 'h' and (m['time'] or m['dt_today']):
                raise ValueError('cannot specify time or T with H offset')
            date = vista_strptime_offset(date, int(m1.group(2)), offset_unit or 'd')
        if m['ext']:
            if m['ext'] == '<':
                date = date.replace(hour=0, minute=0, second=0, microsecond=0)  # start of day
            elif m['ext'] == '>':
                date = date.replace(hour=23, minute=59, second=59, microsecond=999999)  # end of day
            elif m['ext'] == '~':
                date = date - datetime.timedelta(microseconds=1)  # just before the moment
        if date.year < 1800:
            raise ValueError('cannot specify year before 1800')
        return date
    else:
        raise ValueError('invalid date/time')
|
||||||
|
def vista_strptime_year(y: int, today: datetime.datetime) -> int:
    """Promote years to 4 digits

    Short years are assumed to fall in the 2000s when that lands less than
    20 years past *today*, otherwise in the 1900s; 4-digit years pass through.
    """
    if y >= 1000:
        return y
    candidate = y + 2000
    if candidate < today.year + 20:
        return candidate
    return y + 1900
|
||||||
|
def vista_strptime_month(m: str, mapping: dict={'jan': 1, 'feb': 2, 'mar': 3, 'apr': 4, 'may': 5, 'jun': 6, 'jul': 7, 'aug': 8, 'sep': 9, 'oct': 10, 'nov': 11, 'dec': 12}) -> int:
    """Convert en-US month names to integers

    Only the first three (lowercase) letters are significant, so both
    abbreviated and full month names resolve; unknown names raise KeyError.
    """
    abbrev = m[:3]
    return mapping[abbrev]
|
||||||
|
def vista_strptime_offset(base: datetime.datetime, offset: int, suffix: str, mapping: dict={'h': 'hours', 'd': 'days', 'w': 'weeks', 'm': 'months'}) -> datetime.datetime:
    """Apply datetime offset

    *suffix* selects the unit: 'h' hours, 'd' days, 'w' weeks, 'm' months.
    Month arithmetic keeps the day-of-month (so it raises ValueError when
    the target month is shorter, matching datetime.replace semantics).
    """
    if suffix != 'm':
        return base + datetime.timedelta(**{mapping[suffix]: offset})
    # Month offsets: compute in absolute months so year carries/borrows work.
    # fix: the previous `month % 12` formula produced the invalid month 0
    # whenever the target landed on a December, and broke for negative offsets.
    months = base.year * 12 + (base.month - 1) + offset
    return base.replace(year=months // 12, month=months % 12 + 1)
|
||||||
|
def vista_strftime(date: Union[datetime.datetime, datetime.date]) -> str:
    """Convert Python datetime.datetime objects into conventional FileMan/Timson format

    Plain dates render as YYYMMDD (year offset from 1700); datetimes carry
    an additional .HHMMSS suffix.
    """
    # fix: test datetime first — datetime subclasses date, so the original
    # `isinstance(date, datetime.date)` branch also matched datetimes and the
    # time-bearing format was unreachable, silently dropping the time of day.
    if isinstance(date, datetime.datetime):
        return f'{date.year - 1700:03}{date.month:02}{date.day:02}.{date.hour:02}{date.minute:02}{date.second:02}'
    return f'{date.year - 1700:03}{date.month:02}{date.day:02}'
|
Loading…
Reference in New Issue
Block a user