mirror of https://github.com/UMSKT/DB.git
initial commit - WIP code
This commit is contained in:
parent a71c6c0e32
commit 37b23e0edc

@ -0,0 +1,926 @@
xpkgdb/

### venv template
# Virtualenv
# http://iamzed.com/2009/05/07/a-primer-on-virtualenv/
.Python
[Bb]in
[Ii]nclude
[Ll]ib
[Ll]ib64
[Ll]ocal
[Ss]cripts
pyvenv.cfg
.venv
pip-selfcheck.json

### NotepadPP template
# Notepad++ backups #
*.bak

### VisualStudioCode template
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets

# Local History for Visual Studio Code
.history/

# Built Visual Studio Code Extensions
*.vsix

### Linux template
*~

# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*

# KDE directory preferences
.directory

# Linux trash folder which might appear on any partition or disk
.Trash-*

# .nfs files are created when an open file is removed but is still being accessed
.nfs*

### VisualStudio template
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore

# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates

# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs

# Mono auto generated files
mono_crash.*

# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Ww][Ii][Nn]32/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/

# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/

# Visual Studio 2017 auto generated files
Generated\ Files/

# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*

# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml

# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c

# Benchmark Results
BenchmarkDotNet.Artifacts/

# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/

# ASP.NET Scaffolding
ScaffoldingReadMe.txt

# StyleCop
StyleCopReport.xml

# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.tlog
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc

# Chutzpah Test files
_Chutzpah*

# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb

# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap

# Visual Studio Trace Files
*.e2e

# TFS 2012 Local Workspace
$tf/

# Guidance Automation Toolkit
*.gpState

# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user

# TeamCity is a build add-in
_TeamCity*

# DotCover is a Code Coverage Tool
*.dotCover

# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json

# Coverlet is a free, cross platform Code Coverage Tool
coverage*.json
coverage*.xml
coverage*.info

# Visual Studio code coverage results
*.coverage
*.coveragexml

# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*

# MightyMoose
*.mm.*
AutoTest.Net/

# Web workbench (sass)
.sass-cache/

# Installshield output folder
[Ee]xpress/

# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html

# Click-Once directory
publish/

# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj

# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/

# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets

# Microsoft Azure Build Output
csx/
*.build.csdef

# Microsoft Azure Emulator
ecf/
rcf/

# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload

# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/

# Others
ClientBin/
~$*
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs

# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk

# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/

# RIA/Silverlight projects
Generated_Code/

# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak

# SQL Server files
*.mdf
*.ldf
*.ndf

# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl

# Microsoft Fakes
FakesAssemblies/

# GhostDoc plugin setting file
*.GhostDoc.xml

# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/

# Visual Studio 6 build log
*.plg

# Visual Studio 6 workspace options file
*.opt

# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw

# Visual Studio 6 auto-generated project file (contains which files were open etc.)
*.vbp

# Visual Studio 6 workspace and project file (working project files containing files to include in project)
*.dsw
*.dsp

# Visual Studio 6 technical files

# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions

# Paket dependency manager
.paket/paket.exe
paket-files/

# FAKE - F# Make
.fake/

# CodeRush personal settings
.cr/personal

# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc

# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config

# Tabs Studio
*.tss

# Telerik's JustMock configuration file
*.jmconfig

# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs

# OpenCover UI analysis results
OpenCover/

# Azure Stream Analytics local run output
ASALocalRun/

# MSBuild Binary and Structured Log
*.binlog

# NVidia Nsight GPU debugger configuration file
*.nvuser

# MFractors (Xamarin productivity tool) working folder
.mfractor/

# Local History for Visual Studio
.localhistory/

# Visual Studio History (VSHistory) files
.vshistory/

# BeatPulse healthcheck temp database
healthchecksdb

# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/

# Ionide (cross platform F# VS Code tools) working folder
.ionide/

# Fody - auto-generated XML schema
FodyWeavers.xsd

# VS Code files for those working on multiple tools
*.code-workspace

# Local History for Visual Studio Code

# Windows Installer files from build outputs
*.cab
*.msi
*.msix
*.msm
*.msp

# JetBrains Rider
*.sln.iml

### Windows template
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db

# Dump file
*.stackdump

# Folder config file
[Dd]esktop.ini

# Recycle Bin used on file shares
$RECYCLE.BIN/

# Windows Installer files

# Windows shortcuts
*.lnk

### macOS template
# General
.DS_Store
.AppleDouble
.LSOverride

# Icon must end with two \r
Icon

# Thumbnails
._*

# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent

# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk

### PyCharm+all template
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839

# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf

# AWS User-specific
.idea/**/aws.xml

# Generated files
.idea/**/contentModel.xml

# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml

# Gradle
.idea/**/gradle.xml
.idea/**/libraries

# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr

# CMake
cmake-build-*/

# Mongo Explorer plugin
.idea/**/mongoSettings.xml

# File-based project format
*.iws

# IntelliJ
out/

# mpeltonen/sbt-idea plugin
.idea_modules/

# JIRA plugin
atlassian-ide-plugin.xml

# Cursive Clojure plugin
.idea/replstate.xml

# SonarLint plugin
.idea/sonarlint/

# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties

# Editor-based Rest Client
.idea/httpRequests

# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser

### Python template
# Byte-compiled / optimized / DLL files
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

### SublimeText template
# Cache files for Sublime Text
*.tmlanguage.cache
*.tmPreferences.cache
*.stTheme.cache

# Workspace files are user-specific
*.sublime-workspace

# Project files should be checked into the repository, unless a significant
# proportion of contributors will probably not be using Sublime Text
# *.sublime-project

# SFTP configuration file
sftp-config.json
sftp-config-alt*.json

# Package control specific files
Package Control.last-run
Package Control.ca-list
Package Control.ca-bundle
Package Control.system-ca-bundle
Package Control.cache/
Package Control.ca-certs/
Package Control.merged-ca-bundle
Package Control.user-ca-bundle
oscrypto-ca-bundle.crt
bh_unicode_properties.cache

# Sublime-github package stores a github token in this file
# https://packagecontrol.io/packages/sublime-github
GitHub.sublime-settings

@ -1,2 +1,9 @@
# DB
Scripts used to build the UMSKT keys.json database.


## Requirements

* Python
* SageMath
* pefile
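A quick way to sanity-check the requirements before running the scripts (a minimal sketch; `pefile` and the Sage import are what the parsers below use, and a `7z` executable on the PATH is assumed by the scanner script):

```python
#!/usr/bin/env python
"""Rough environment check for the DB scripts (illustrative only)."""
import shutil
import sys

missing = []

try:
    import pefile  # PE resource parsing, used by the PIDGEN/BINK extractor
except ImportError:
    missing.append("pefile (pip install pefile)")

try:
    from sage.all import EllipticCurve  # noqa: F401  # SageMath, used by the BINK/ECDLP helper
except ImportError:
    missing.append("sagemath (run the scripts with `sage -python` or install SageMath)")

if shutil.which("7z") is None:
    missing.append("7z (7-Zip/p7zip on the PATH, used to unpack ISOs, MSIs and CABs)")

if missing:
    print("Missing requirements:")
    for item in missing:
        print(" -", item)
    sys.exit(1)

print("All requirements found.")
```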
@ -0,0 +1,110 @@
#!/usr/bin/env python
import sqlite3
import json


def serialize_binary_data(data):
    """Recursively convert bytes values into strings so row data can be JSON-encoded."""
    if isinstance(data, bytes):
        return str(data)
    elif isinstance(data, list):
        return [serialize_binary_data(item) for item in data]
    elif isinstance(data, dict):
        return {key: serialize_binary_data(value) for key, value in data.items()}
    return data

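Note that `str()` on a bytes object produces the Python repr (e.g. `"b'\x01\x02'"`), which does not round-trip cleanly. A hex encoding is one alternative worth considering (a sketch, not what the script currently does):

```python
def serialize_binary_data_hex(data):
    # Same recursion as above, but emit hex strings instead of bytes reprs.
    if isinstance(data, bytes):
        return data.hex()
    if isinstance(data, list):
        return [serialize_binary_data_hex(item) for item in data]
    if isinstance(data, dict):
        return {key: serialize_binary_data_hex(value) for key, value in data.items()}
    return data
```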
def dump_sqlite_to_json(database_file, output_file):
    connection = sqlite3.connect(database_file)
    connection.row_factory = sqlite3.Row
    cursor = connection.cursor()

    # Table names are fixed strings, so building the SELECT with an f-string is safe here.
    tables = [
        "BINK",
        "DPCDLL",
        "DPCDLL_ENTRIES",
        "LICDLL",
        "PIDGEN",
        "BINK_PIDGEN",
        "PRODUCT"
    ]

    data = {}
    for table in tables:
        cursor.execute(f"SELECT * FROM {table}")
        rows = cursor.fetchall()

        if table == "BINK_PIDGEN":
            # Group the PIDGEN<->BINK link table by PIDGEN ID, collecting the BINK IDs per PIDGEN.
            merged_data = {}
            for row in rows:
                pidgen_id = row["PIDGEN"]
                bink = row["BINK"]

                if pidgen_id in merged_data:
                    merged_data[pidgen_id]["BINK"].append(bink)
                else:
                    merged_data[pidgen_id] = {}
                    merged_data[pidgen_id]["BINK"] = [bink]

            serialized_merged_data = serialize_binary_data(merged_data)
            data[table] = serialized_merged_data
        elif table == "PRODUCT":
            # Key products by their short name.
            product_data = {}
            for row in rows:
                shortname = row["shortname"]
                product_data[shortname] = serialize_binary_data(dict(row))

            data[table] = product_data
        elif table == "LICDLL":
            # Key LICDLL rows by their ID column.
            licdll_data = {}
            for row in rows:
                row_dict = dict(row)
                id_value = row_dict.pop("ID", None)
                licdll_data[id_value] = serialize_binary_data(row_dict)

            data[table] = licdll_data
        elif table == "DPCDLL_ENTRIES":
            # Nest entries as {DPCDLL_ID: {BINK_ID: entry}}.
            dpcll_entries_data = {}
            for row in rows:
                dpcll_id = row["DPCDLL_ID"]
                bink_id = row["BINK_ID"]
                entry_data = dict(row)
                del entry_data["DPCDLL_ID"]
                del entry_data["BINK_ID"]

                if dpcll_id not in dpcll_entries_data:
                    dpcll_entries_data[dpcll_id] = {}

                dpcll_entries_data[dpcll_id][bink_id] = serialize_binary_data(entry_data)

            data[table] = dpcll_entries_data
        elif table == "DPCDLL":
            dpcll_data = {}
            for row in rows:
                row_dict = dict(row)
                id_value = row_dict.pop("ID", None)
                dpcll_data[id_value] = serialize_binary_data(row_dict)

            data[table] = dpcll_data
        elif table == "BINK":
            bink_data = {}
            for row in rows:
                row_dict = dict(row)
                id_value = row_dict.pop("ID", None)
                bink_data[id_value] = serialize_binary_data(row_dict)

            data[table] = bink_data
        else:
            # Remaining tables (PIDGEN) are dumped as plain lists of rows.
            serialized_rows = [serialize_binary_data(dict(row)) for row in rows]
            data[table] = serialized_rows

    with open(output_file, 'w') as file:
        json.dump(data, file, indent=4)

    cursor.close()
    connection.close()


# Usage example
database_file = "xpkgdb.sqlite"
output_file = "xpkgdb.json"
dump_sqlite_to_json(database_file, output_file)
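A small sketch of reading the dump back, assuming the layout produced above (top-level keys are table names, BINK/DPCDLL/LICDLL keyed by ID, PRODUCT keyed by shortname):

```python
import json

with open("xpkgdb.json") as f:
    db = json.load(f)

# List products and the BINK IDs linked to each PIDGEN entry.
print("products:", ", ".join(sorted(db["PRODUCT"])))
for pidgen_id, links in db["BINK_PIDGEN"].items():
    print(f"PIDGEN {pidgen_id} -> BINK {links['BINK']}")
```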
@ -0,0 +1,226 @@
#!/usr/bin/env sage

import os
import sys
import sqlite3
import parser
import tempfile
import re
import subprocess
import hashlib
import traceback


def extract_file_with_7z(container_path, file_path):
    try:
        output = subprocess.check_output(['7z', 'x', '-so', container_path, file_path], stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        output = e.output
    return output


def process_file_contents_with_7z(container_path, file_path, process_func, container_extension=None):
    process_func(extract_file_with_7z(container_path, file_path), file_path, container_extension)


def process_container_with_7z(file_path):
    files = {}
    # Use 7-Zip to list the contents of the container (ISO, MSI, CAB, ...)
    command = ['7z', 'l', '-slt', '-r', '-sdel', '-so', file_path]
    try:
        output = subprocess.check_output(command, universal_newlines=True, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        output = e.output

    # Parse the output
    lines = output.splitlines()
    i = 0
    while i < len(lines):
        file = {'path': '', 'size': 0, 'packed_size': 0, 'created': '', 'modified': ''}

        if lines[i].startswith('Path ='):
            # Extract the path, size, packed size, created, and modified information
            file['path'] = re.search(r'Path = (.+)', lines[i]).group(1)

            while i < len(lines):
                line = lines[i]
                try:
                    if line.startswith('Size = '):
                        file['size'] = re.search(r'Size = (\d+)', line).group(1)
                    elif line.startswith('Packed Size = '):
                        file['packed_size'] = re.search(r'Packed Size = (\d+)', line).group(1)
                    elif line.startswith('Created = '):
                        file['created'] = re.search(r'Created = (.+)', line).group(1)
                    elif line.startswith('Modified = '):
                        file['modified'] = re.search(r'Modified = (.+)', line).group(1)
                    elif line == "":
                        break
                except AttributeError:
                    pass  # malformed or empty field value; skip it

                i += 1

            files[file['path']] = file

        i += 1

    return files

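For reference, `7z l -slt` emits one `Key = Value` record per archive member, which is what the loop above consumes. A small illustration of the record format and the resulting dictionary (member names and sizes here are invented for the example):

```python
# Two records in the `7z l -slt` format the parser expects (invented values):
sample_listing = """\
Path = I386\\PIDGEN.DLL
Size = 30208
Packed Size = 15062
Modified = 2001-08-23 12:00:00

Path = I386\\DPCDLL.DL_
Size = 9497
Packed Size = 9497
Modified = 2001-08-23 12:00:00
"""

# process_container_with_7z() turns records like these into:
# {
#     'I386\\PIDGEN.DLL': {'path': 'I386\\PIDGEN.DLL', 'size': '30208',
#                          'packed_size': '15062', 'created': '',
#                          'modified': '2001-08-23 12:00:00'},
#     'I386\\DPCDLL.DL_': {...},
# }
# Note the numeric fields come back as strings, since they are taken from regex groups.
```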
file_prefixes = ['pidgen', 'licdll', 'dpcdll', 'mso', 'msa', 'pidca']


def process_nested_file(temp_container_path, path):
    path_lower = path.lower()
    if any(path_lower.startswith(prefix) for prefix in file_prefixes):
        if path_lower.endswith('dll'):
            compressed_file_data = extract_file_with_7z(temp_container_path, path)
            process_dll(compressed_file_data, path)

        if path_lower.endswith('dl_'):
            compressed_file_data = extract_file_with_7z(temp_container_path, path)
            process_container(compressed_file_data, path, container_extension='.dl_')


def process_container(file_data, file_path, container_extension=None):
    # Create a temporary file
    with tempfile.NamedTemporaryFile(suffix=container_extension, delete=False) as temp_container_file:
        temp_container_path = temp_container_file.name
        temp_container_file.write(file_data)
        temp_container_file.close()

    files = process_container_with_7z(temp_container_path)

    if container_extension == '.msi':
        for path, file in files.items():
            process_nested_file(temp_container_path, path)
            if path.lower().startswith('binary.'):
                # Read the contents of streams starting with 'Binary.'
                print(f'Parsing MSI Stream Name: {path}')
                process_file_contents_with_7z(temp_container_path, path, process_dll)

    if container_extension == '.cab':
        for path, file in files.items():
            process_nested_file(temp_container_path, path)

    if container_extension == '.dl_':
        process_file_contents_with_7z(temp_container_path, file_path, process_dll)

    # Remove the temporary container file
    os.remove(temp_container_path)


def process_dll(file_data, file_path, container_extension=None):
    # Process the DLL file as needed
    print(f'[{file_path}]: Parsing file')

    pidgen_data = parser.pidgen.parse(file_data)
    if pidgen_data != {}:
        print(f'[{file_path}]: Found PIDGEN data')

        sha1 = hashlib.sha1(file_data).hexdigest()
        print(f'[{file_path}]: SHA1: {sha1}')
        print(pidgen_data)

    try:
        dpcll_data = parser.dpcdll.parse(file_data)
        if dpcll_data != {}:
            print(f'[{file_path}]: Found DPCDLL data')

            sha1 = hashlib.sha1(file_data).hexdigest()
            print(f'[{file_path}]: SHA1: {sha1}')
    except ValueError:
        dpcll_data = {}

    if any(file_path.lower().startswith(prefix) for prefix in ['licdll', 'mso.dll', 'msa.dll']):
        print(f'[{file_path}]: Cataloguing a LICDLL type file')

        sha1 = hashlib.sha1(file_data).hexdigest()
        print(f'[{file_path}]: SHA1: {sha1}')


def process_iso(file_path):
    files = process_container_with_7z(file_path)

    for path, file in files.items():
        if path.lower().endswith('.msi'):
            print(f'[{path}]: Processing MSI file')
            process_file_contents_with_7z(file_path, path, process_container, container_extension='.msi')

        if path.lower().endswith('.cab'):
            print(f'[{path}]: Processing CAB file')
            process_file_contents_with_7z(file_path, path, process_container, container_extension='.cab')

        if path.lower().endswith('.dll'):
            print(f'[{path}]: Processing DLL file')
            process_file_contents_with_7z(file_path, path, process_dll)


def process_file_or_folder(path):
    extensions = ['.iso', '.img']
    if os.path.isfile(path):
        print(f'[{path}]: Processing ISO/Disk Image file')
        process_iso(path)

    elif os.path.isdir(path):
        print(f'[{path}]: Recursing through folder')
        for root, dirs, files in os.walk(path):
            for file in files:
                if file.lower().endswith(tuple(extensions)):  # .iso and .img, per the usage text
                    iso_path = os.path.join(root, file)
                    print(f'Processing ISO file: {iso_path}')
                    process_iso(iso_path)
    else:
        print(f'Invalid file or folder: {path}')


def check_7z_command():
    if sys.platform.startswith('win'):  # Windows
        try:
            # Use the 'where' command to check if '7z' is in the path
            subprocess.check_output(['where', '7z'])
            return True
        except subprocess.CalledProcessError:
            return False
    else:  # Unix-based systems (Linux, macOS, etc.)
        try:
            # Use the 'which' command to check if '7z' is in the path
            subprocess.check_output(['which', '7z'])
            return True
        except subprocess.CalledProcessError:
            return False

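`shutil.which` performs the same PATH lookup portably without shelling out to `where`/`which`; a drop-in alternative sketch:

```python
import shutil


def check_7z_command():
    # shutil.which searches PATH the way the shell would, on every platform.
    return shutil.which('7z') is not None
```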
# Main function
def main():
    if len(sys.argv) != 3:
        print('Usage: {} <file.iso|folder> <database>'.format(sys.argv[0]))
        print('Parses <file.iso|folder> for various DLLs required for product licensing and activation')
        print('Data is saved to the SQLite3 database <database>, which will be created if it does not exist')
        print('If a <folder> is specified, it will search recursively for files ending in .iso/.img')
        sys.exit(1)

    path = sys.argv[1]
    database = sys.argv[2]

    if not os.path.exists(database):
        conn = sqlite3.connect(database)
        with open('newdb.sql') as f:
            conn.executescript(f.read())
        conn.close()

    # WIP: the connection is opened but not yet passed down to the parsers.
    conn = sqlite3.connect(database)
    process_file_or_folder(path)
    conn.close()


# Entry point
if __name__ == '__main__':
    if not check_7z_command():
        print('7zip is not in the path, please add the 7z executable to the system path and try again')
        sys.exit(1)
    try:
        main()
    except Exception as e:
        print('An error occurred:', e)
        traceback.print_exc()
        sys.exit(1)
@ -0,0 +1,4 @@
import parser.dpcdll as dpcdll
import parser.pidgen as pidgen

__all__ = ['dpcdll', 'pidgen']
@ -0,0 +1,3 @@
from .dpcdll import parse

__all__ = ['parse']
@ -0,0 +1,61 @@
from struct import unpack


def read_int(data, offset):
    return unpack('<I', data[offset:offset + 4])[0]


def parse(file_data):
    lic_types = ['NULL', 'Volume', 'Retail', 'Evaluation', 'Tablet', 'OEM', 'Embedded']

    dpc_data = {}

    # Locate the license data by searching for a known byte pattern;
    # the record of interest starts 21 bytes earlier.
    offset = file_data.find(b'\x00\x00\x00\xff\xff\xff\x7f\x80') - 21

    if offset == -22:
        # find() returned -1: the marker is not present in this file.
        raise ValueError('Offset not found')

    # Step back in 164-byte strides until a zero dword is found, just before the first entry.
    while file_data[offset:offset + 4] != b'\x00\x00\x00\x00':
        offset -= 164

    offset -= 4

    while True:
        if offset < 0 or offset + 32 >= len(file_data):
            raise ValueError('Error in offset or not enough data')

        ind = read_int(file_data, offset)  # entry index (currently unused)
        bink_id = '%04X' % read_int(file_data, offset + 4)  # BINK ID as four uppercase hex digits
        min_pid = read_int(file_data, offset + 8)
        max_pid = read_int(file_data, offset + 12)

        if min_pid > 999 or max_pid > 999:
            break

        lic_type = read_int(file_data, offset + 16)

        if lic_type > 6:
            break

        days_to_act = str(read_int(file_data, offset + 20))
        eval_days = str(read_int(file_data, offset + 24))
        sig_len = read_int(file_data, offset + 28)

        if offset + 32 + sig_len >= len(file_data):
            raise ValueError('Error in signature length or not enough data')

        offset += 32 + sig_len

        if bink_id not in dpc_data:
            dpc_data[bink_id] = []

        dpc_data[bink_id].append({
            'Type': lic_types[lic_type],
            'PIDRange': [min_pid, max_pid],
            'EvaluationDays': eval_days,
            'ActivationGraceDays': days_to_act
        })

    return dpc_data
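A minimal usage sketch for this parser (the path is hypothetical; the output maps BINK IDs to the license ranges found in a DPCDLL binary):

```python
from parser.dpcdll import parse

with open('dpcdll.dll', 'rb') as f:  # hypothetical path to an extracted DPCDLL
    entries = parse(f.read())

for bink_id, ranges in entries.items():
    for entry in ranges:
        print(bink_id, entry['Type'], entry['PIDRange'],
              'eval days:', entry['EvaluationDays'],
              'grace days:', entry['ActivationGraceDays'])
```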
@ -0,0 +1,3 @@
from .pidgen import parse

__all__ = ['parse']
@ -0,0 +1,4 @@
from .ecdlpsolver import parse
from .ecdlpsolver import parse_bink_data_with_sage

__all__ = ['parse', 'parse_bink_data_with_sage']
@ -0,0 +1,131 @@
try:
    from sage.all import *
except ImportError:
    print("Unable to load SageMath")

# def warnx(*args, **kwargs):
#     print(*args, file=sys.stderr, **kwargs)


# def tee(*output, file=None, **kwargs):
#     print(*output, file=sys.stdout, **kwargs)
#     if file is not None:
#         print(*output, file=file, **kwargs)


def btoi(bb):
    # Interpret a byte string as a little-endian integer.
    return int.from_bytes(bb, byteorder='little')


def rfactor(m, keysize, B):
    # Ask ECM for a factor of m with roughly as many decimal digits as a keysize-bit number.
    digits = len('%d' % (2**keysize - 1))  # ** (not ^): the Sage preparser does not rewrite operators in imported .py modules
    ff = ecm.find_factor(m, factor_digits=digits)  # Try to find a good candidate
    for f in ff:
        # f * B == point at infinity means the order of the base point B divides f.
        if f > 2 and f.is_prime() and not f * B:
            # warnx("ok for %d" % f)
            return True, [f]
    else:
        # warnx("bad run: %s" % ff)
        return False, ff

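The `not f * B` test works because Sage elliptic-curve points are falsy exactly when they are the point at infinity. A toy check of the idea on a small curve (illustrative values only, not a BINK curve):

```python
from sage.all import GF, EllipticCurve

E = EllipticCurve(GF(101), [2, 3])   # small curve purely for illustration
B = E.gens()[0]                      # a point on the curve
n = B.order()

assert not n * B                     # n * B is the point at infinity -> falsy
assert (n + 1) * B                   # any other multiple is an ordinary point -> truthy
```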
def parse_bink_data_with_sage(bink):
    curve = bink['curve']
    bink_header = bink['header']
    F = GF(curve['p'])
    # warnx("offs = %d, nb = %d, p = %x" % (offs, nb, p))
    a = F(curve['a'])
    b = F(curve['b'])
    bx = F(curve['g']['x'])
    by = F(curve['g']['y'])
    Kx = F(curve['pub']['x'])
    Ky = F(curve['pub']['y'])

    E = EllipticCurve(F, [0, 0, 0, a, b])
    # warnx(E)
    B = E(bx, by)
    K = E(Kx, Ky)

    # If we get here, we know B and K are on the curve.
    # Now get the order of the curve and then factorize it.

    n = E.order()
    # warnx("n = %d, now factoring..." % n)
    # Find L by just trying if any of the factors in f yield the point at infinity
    factors = []

    ok, values = rfactor(n, bink_header['hashlen'], B)
    if ok:
        L = values[0]
    while not ok:
        for value in values:
            ok, nl = rfactor(value, bink_header['keysize'], B)
            if ok:
                L = nl[0]
                break
            values.extend(nl)

    factors = [n // L, L]

    # warnx(factors)
    # warnx("Reduce the result of ECDLP Solver modulo %d" % L)
    # warnx("\n\njob input:\n\n")

    bink['curve']['n'] = L

    solver_input = ''
    solver_input += 'GF := GF(%d);\n' % curve['p']
    solver_input += 'E := EllipticCurve([GF|%d,%d]);\n' % (curve['a'], curve['b'])
    solver_input += 'G := E![%d,%d];\n' % (curve['g']['x'], curve['g']['y'])
    solver_input += 'K := E![%d,%d];\n' % (curve['pub']['x'], curve['pub']['y'])
    solver_input += '/*\n'
    solver_input += 'FactorCount:=%d;\n' % len(factors)
    for f in factors:
        solver_input += '%d;\n' % f
    solver_input += '*/'

    bink['solver_input'] = solver_input

    return bink


def parse(bink):
    bink_id_int = btoi(bink[0x00:0x04])
    bink_id = "%08x" % bink_id_int

    bink_header = {
        'identifier': btoi(bink[0x00:0x04]),
        'sizeof': btoi(bink[0x04:0x08]),
        'countof': btoi(bink[0x08:0x0C]),
        'checksum': btoi(bink[0x0C:0x10]),
        'version': btoi(bink[0x10:0x14]),
        'keysize': btoi(bink[0x14:0x18]),
        'hashlen': btoi(bink[0x18:0x1C]),
        'siglen': btoi(bink[0x1C:0x20]),
    }

    bink_values = bink[0x20:]
    if bink_header["version"] > 20020420:
        # Newer resources (version field greater than 20020420) carry two extra header fields.
        bink_values = bink[0x28:]
        bink_header['authlen'] = btoi(bink[0x20:0x24])
        bink_header['pidlen'] = btoi(bink[0x24:0x28])

    # Each curve parameter is stored as keysize 32-bit words.
    offs = bink_header["keysize"] * 4

    curve = {
        'p': btoi(bink_values[:offs]),
        'a': btoi(bink_values[offs:offs * 2]),
        'b': btoi(bink_values[offs * 2:offs * 3]),
        'g': {'x': btoi(bink_values[offs * 3:offs * 4]), 'y': btoi(bink_values[offs * 4:offs * 5])},
        'pub': {'x': btoi(bink_values[offs * 5:offs * 6]), 'y': btoi(bink_values[offs * 6:offs * 7])},
        'n': '',
        'priv': ''
    }

    output = {
        'bink_name': bink_id,
        'header': bink_header,
        'curve': curve,
    }

    output = parse_bink_data_with_sage(output)

    return output
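Once L (the order of the generator G) is known, what remains for the external solver is the discrete log of the public key. On a toy curve the whole step can be mimicked directly in Sage; the sign convention below (public key = -private * G) is an assumption about the BINK scheme, so treat the sketch as illustrative only:

```python
from sage.all import GF, EllipticCurve, discrete_log

# Toy parameters only; real BINK curves are far larger.
E = EllipticCurve(GF(211), [0, 0, 0, 4, 7])
G = E.gens()[0]
L = G.order()

k = 23 % L                  # pretend private key
K = -k * G                  # pretend public key, assuming the pubkey = -priv * G convention

recovered = discrete_log(-K, G, L, operation='+')
assert recovered == k
print('recovered private key:', recovered)
```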
@ -0,0 +1,95 @@
import hashlib
import parser.pidgen.bink as bink
import pefile


def bink_out(resource_data):
    # Process the resource data
    # bink_id = "%02x" % int.from_bytes(resource_data[0x00:0x04], 'little')
    sha1_hash = hashlib.sha1(resource_data).hexdigest()
    data = bink.parse(resource_data)
    data["sha1_hash"] = sha1_hash

    return data


def parse(file_data):
    output = {}

    found_bink = 0
    # attempt extracting via the PE resource directory
    try:
        # Load the DLL using pefile
        pe = pefile.PE(data=file_data)

        # Loop over the resources in the DLL
        for resource_type in pe.DIRECTORY_ENTRY_RESOURCE.entries:
            if resource_type.name is not None and resource_type.name.string.decode() == 'BINK':
                found_bink = 1
                # Extract resources from the "BINK" resource type
                for resource_id_entry in resource_type.directory.entries:
                    for resource_entry in resource_id_entry.directory.entries:
                        resource_offset = resource_entry.data.struct.OffsetToData
                        resource_size = resource_entry.data.struct.Size

                        # Access the resource data
                        resource_data = pe.get_memory_mapped_image()[resource_offset: resource_offset + resource_size]

                        bink_id = "%08x" % int.from_bytes(resource_data[0x00:0x04], 'little')
                        output[bink_id] = bink_out(resource_data)

        # Close the PE file
        pe.close()
    except pefile.PEFormatError:
        # Not a PE file at all; fall back to the raw byte search below.
        found_bink = 0
    except AttributeError:
        # PE file without a resource directory; fall back to the raw byte search below.
        found_bink = 0

    # attempt a string search
    if found_bink == 0:
        # Little-endian version constants found at offset 0x10 of a BINK header:
        # 0x0130DFAE = 19980206 (BINK1998) and 0x01317CC4 = 20020420 (BINK2002).
        string_1998 = b'\xAE\xDF\x30\x01'
        string_2002 = b'\xC4\x7C\x31\x01'
        entries = {}
        for i in range(len(file_data) - 3):
            # BINK1998 blobs stored 0x170 bytes apart
            if ((file_data[i:i + 4] == string_1998 and i + 0x170 < len(file_data) and (
                    file_data[i + 0x170:i + 0x170 + 4] == string_1998)) or (
                    file_data[i:i + 4] == string_1998 and i - 0x170 > 0 and (
                    file_data[i - 0x170:i - 0x170 + 4] == string_1998))):
                start = i - 16
                end = start + int.from_bytes(file_data[start + 4:start + 8], 'little') + 4
                entries[start] = {
                    "Type": "BINK1998",
                    "StartAddress": start,
                    "EndAddress": end
                }

            # BINK1998 blobs stored 0x180 bytes apart
            if ((file_data[i:i + 4] == string_1998 and i + 0x180 < len(file_data) and (
                    file_data[i + 0x180:i + 0x180 + 4] == string_1998)) or (
                    file_data[i:i + 4] == string_1998 and i - 0x180 > 0 and (
                    file_data[i - 0x180:i - 0x180 + 4] == string_1998))):
                start = i - 16
                end = start + int.from_bytes(file_data[start + 4:start + 8], 'little') + 4
                entries[start] = {
                    "Type": "BINK1998",
                    "StartAddress": start,
                    "EndAddress": end
                }

            # BINK2002 blobs stored 0x1E8 bytes apart
            elif file_data[i:i + 4] == string_2002 and i + 0x1E8 < len(file_data) and (
                    file_data[i + 0x1E8:i + 0x1E8 + 4] == string_2002):
                start = i - 16
                end = start + int.from_bytes(file_data[start + 4:start + 8], 'little') + 4
                entries[start] = {
                    "Type": "BINK2002",
                    "StartAddress": start,
                    "EndAddress": end
                }

        if len(entries) != 0:
            for key, value in entries.items():
                bink_data = file_data[key:value['EndAddress']]
                bink_id = "%08x" % int.from_bytes(bink_data[0x00:0x04], 'little')
                output[bink_id] = bink_out(bink_data)

    return output
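A short usage sketch tying the PIDGEN parser together (the DLL path is hypothetical; each entry in the result is a BINK blob keyed by its ID, carrying the curve parameters and the Magma-style solver input produced by the Sage helper):

```python
import parser.pidgen as pidgen

with open('pidgen.dll', 'rb') as f:   # hypothetical path to an extracted PIDGEN.DLL
    binks = pidgen.parse(f.read())

for bink_id, data in binks.items():
    header = data['header']
    print(f"BINK {bink_id}: version {header['version']}, "
          f"keysize {header['keysize']} dwords, sha1 {data['sha1_hash']}")
    print(data['solver_input'])
```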