58 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| ElevenNotes | f4f1ab656f | Merge branch 'master' of https://github.com/11notes/docker-kms | 2025-05-21 08:44:57 +02:00 |
| ElevenNotes | 687d4eebdc | [fix] missing input version on downstream workflow for GUI | 2025-05-21 08:44:45 +02:00 |
| github-actions[bot] | a90ee477d1 | github-actions[bot]: update README.md | 2025-05-21 06:27:54 +00:00 |
| ElevenNotes | 274c6587ea | [upgrade] to latest workflow | 2025-05-21 08:20:17 +02:00 |
| ElevenNotes | be06157c03 | Merge branch 'master' of https://github.com/11notes/docker-kms | 2025-05-21 07:25:49 +02:00 |
| ElevenNotes | 468118bf97 | [upgrade] to latest workflow | 2025-05-21 07:25:40 +02:00 |
| github-actions[bot] | 485a5524eb | github-actions[bot]: update README.md | 2025-05-20 13:48:05 +00:00 |
| ElevenNotes | 24b5369071 | [cut] KMS_CLIENTCOUNT | 2025-05-20 15:42:14 +02:00 |
| ElevenNotes | 9da9b799b3 | Merge branch 'master' of https://github.com/11notes/docker-kms | 2025-05-20 15:32:25 +02:00 |
| ElevenNotes | b676412fc9 | [cut] KMS_CLIENTCOUNT | 2025-05-20 15:32:18 +02:00 |
| ElevenNotes | 89605118da | [upgrade] 1.0.1 | 2025-05-20 15:32:05 +02:00 |
| ElevenNotes | ed61e0a389 | [fix] race condition | 2025-05-20 15:30:35 +02:00 |
| github-actions[bot] | 7dfaf728ea | github-actions[bot]: update README.md | 2025-05-20 13:15:50 +00:00 |
| ElevenNotes | e41bf5a487 | Merge branch 'master' of https://github.com/11notes/docker-kms | 2025-05-20 15:07:25 +02:00 |
| ElevenNotes | 638cbd9150 | [upgrade] 1.0.1 | 2025-05-20 15:07:15 +02:00 |
| github-actions[bot] | 55853de064 | github-actions[bot]: update README.md | 2025-05-19 13:39:10 +00:00 |
| ElevenNotes | fce33aa489 | [upgrade] to latest workflow | 2025-05-19 09:02:11 +02:00 |
| ElevenNotes | b9dd62fa54 | [feature] add ARM v7 | 2025-05-19 09:01:59 +02:00 |
| ElevenNotes | 7acd95278f | Merge branch 'master' of https://github.com/11notes/docker-kms | 2025-05-05 19:48:53 +02:00 |
| ElevenNotes | f254a289c2 | [upgrade] to latest workflows | 2025-05-05 19:48:44 +02:00 |
| github-actions[bot] | 727bf1f243 | github-actions[bot]: update README.md | 2025-05-05 09:03:42 +00:00 |
| ElevenNotes | 2dcd91990a | [upgrade] switch to fork with semver | 2025-05-05 10:58:28 +02:00 |
| ElevenNotes | 7519a01ba5 | Merge branch 'master' of https://github.com/11notes/docker-kms | 2025-05-05 10:41:09 +02:00 |
| ElevenNotes | b4f0d240df | [upgrade] switch to https://github.com/11notes/fork-py-kms with semver | 2025-05-05 10:41:01 +02:00 |
| github-actions[bot] | cdb5a78fb4 | github-actions[bot]: update README.md | 2025-05-02 08:31:31 +00:00 |
| ElevenNotes | 4c77d9218e | Merge branch 'master' of https://github.com/11notes/docker-kms | 2025-05-02 10:27:28 +02:00 |
| ElevenNotes | c0bf59835e | [fix] invalidate cache | 2025-05-02 10:27:18 +02:00 |
| github-actions[bot] | 543a33b1bf | github-actions[bot]: update README.md | 2025-05-02 08:18:33 +00:00 |
| ElevenNotes | cc8b9eb8ec | Merge branch 'master' of https://github.com/11notes/docker-kms | 2025-05-02 10:14:22 +02:00 |
| ElevenNotes | 49b56ac50b | [fix] upgrade all BUT pip | 2025-05-02 10:13:56 +02:00 |
| github-actions[bot] | 84c8141758 | github-actions[bot]: update README.md | 2025-05-02 08:02:46 +00:00 |
| ElevenNotes | e8aa27002d | [fix] UID/GID defaults | 2025-05-02 09:57:31 +02:00 |
| ElevenNotes | c42936bf8c | [fix] --break-system-packages | 2025-05-02 09:52:22 +02:00 |
| ElevenNotes | bad0decb4a | Merge branch 'master' of https://github.com/11notes/docker-kms | 2025-05-02 09:49:47 +02:00 |
| ElevenNotes | 914dacaaf5 | [fix] no-cache-dir | 2025-05-02 09:49:38 +02:00 |
| github-actions[bot] | 6ae34d7b40 | github-actions[bot]: update README.md | 2025-05-02 07:45:50 +00:00 |
| ElevenNotes | 98dd10e9db | Merge branch 'master' of https://github.com/11notes/docker-kms | 2025-05-02 09:41:26 +02:00 |
| ElevenNotes | 46a338a6eb | [fix] upgrade | 2025-05-02 09:41:17 +02:00 |
| github-actions[bot] | bb7d6b68ce | github-actions[bot]: update README.md | 2025-05-02 07:27:31 +00:00 |
| ElevenNotes | 06b86cbc27 | [upgrade] to latest workflows | 2025-05-02 09:17:38 +02:00 |
| ElevenNotes | f9031c3b01 | Merge branch 'master' of https://github.com/11notes/docker-kms | 2025-05-02 08:55:46 +02:00 |
| ElevenNotes | 5453f6d93a | updated workflow | 2025-03-10 07:08:36 +01:00 |
| github-actions[bot] | 607ebb9cf7 | auto update README.md | 2025-03-07 11:11:30 +00:00 |
| ElevenNotes | 62b10178d3 | Merge branch 'master' of https://github.com/11notes/docker-kms | 2025-03-07 12:08:57 +01:00 |
| ElevenNotes | 74f3f1a6d8 | [fix] semver.length | 2025-03-07 12:08:43 +01:00 |
| github-actions[bot] | 9da23cfa1f | auto update README.md | 2025-03-07 11:03:55 +00:00 |
| ElevenNotes | 88106c5ab3 | Merge branch 'master' of https://github.com/11notes/docker-kms | 2025-03-07 12:01:02 +01:00 |
| ElevenNotes | 3c49769856 | [upgrade] docker.yml workflow to new javascript version | 2025-03-07 12:00:52 +01:00 |
| github-actions[bot] | 0731c67061 | auto update README.md | 2025-02-21 05:56:22 +00:00 |
| ElevenNotes | 5ad13ddfeb | [feature] sql_get_all default sort by lastRequestTime DESC | 2025-02-21 06:51:21 +01:00 |
| ElevenNotes | 3045fea5a5 | [cut] no more static RELEASE.md | 2025-02-20 06:53:12 +01:00 |
| ElevenNotes | 98df1f7f0a | [feature] new release workflow (no more static RELEASE.md) | 2025-02-20 06:52:42 +01:00 |
| ElevenNotes | 803d20d5e0 | Merge branch 'master' of https://github.com/11notes/docker-kms | 2025-02-19 11:25:18 +01:00 |
| ElevenNotes | cb4531c479 | add run-name | 2025-02-19 11:25:08 +01:00 |
| github-actions[bot] | e340cb2fd5 | update README.md | 2025-02-19 10:09:40 +00:00 |
| github-actions[bot] | 6be75ef815 | update README.md | 2025-02-19 09:53:13 +00:00 |
| ElevenNotes | 26c465e656 | Merge branch 'master' of https://github.com/11notes/docker-kms | 2025-02-19 10:50:13 +01:00 |
| github-actions[bot] | ea186dd607 | update README.md | 2025-02-19 08:12:23 +00:00 |
20 changed files with 637 additions and 2255 deletions

View File

@@ -1,6 +1,7 @@
# default
.git*
*.md
LICENSE
img/
maintain/
project*
LICENSE
*.md
img/
node_modules/

3
.gitattributes vendored
View File

@@ -1,2 +1,3 @@
# Auto detect text files and perform LF normalization
# default
* text=auto
*.sh eol=lf

115
.github/workflows/cron.update.yml vendored Normal file
View File

@@ -0,0 +1,115 @@
name: cron-update
on:
  workflow_dispatch:
  schedule:
    - cron: "0 5 * * *"
jobs:
  cron-update:
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: write
    steps:
      - name: init / checkout
        uses: actions/checkout@85e6279cec87321a52edac9c87bce653a07cf6c2
        with:
          ref: 'master'
          fetch-depth: 0
      - name: cron-update / get latest version
        run: |
          echo "LATEST_VERSION=$(curl -s https://api.github.com/repos/11notes/fork-py-kms/releases/latest | jq -r '.tag_name' | sed 's/v//')" >> "${GITHUB_ENV}"
          echo "LATEST_TAG=$(git describe --abbrev=0 --tags `git rev-list --tags --max-count=1` | sed 's/v//')" >> "${GITHUB_ENV}"
      - name: cron-update / setup node
        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
        with:
          node-version: '20'
      - run: npm i semver
      - name: cron-update / compare latest with current version
        uses: actions/github-script@62c3794a3eb6788d9a2a72b219504732c0c9a298
        with:
          script: |
            const { existsSync, readFileSync, writeFileSync } = require('node:fs');
            const { resolve } = require('node:path');
            const { inspect } = require('node:util');
            const semver = require('semver')
            const repository = {dot:{}};
            try{
              const path = resolve('.json');
              if(existsSync(path)){
                try{
                  repository.dot = JSON.parse(readFileSync(path).toString());
                }catch(e){
                  throw new Error('could not parse .json');
                }
              }else{
                throw new Error('.json does not exist');
              }
            }catch(e){
              core.setFailed(e);
            }
            const latest = semver.valid(semver.coerce('${{ env.LATEST_VERSION }}'));
            const current = semver.valid(semver.coerce(repository.dot.semver.version));
            const tag = semver.valid(semver.coerce('${{ env.LATEST_TAG }}'));
            if(latest && latest !== current){
              core.info(`new ${semver.diff(current, latest)} release found (${latest})!`)
              repository.dot.semver.version = latest;
              if(tag){
                core.exportVariable('WORKFLOW_NEW_TAG', semver.inc(tag, semver.diff(current, latest)));
              }
              if(repository.dot.semver?.latest){
                repository.dot.semver.latest = repository.dot.semver.version;
              }
              if(repository.dot?.readme?.comparison?.image){
                repository.dot.readme.comparison.image = repository.dot.readme.comparison.image.replace(current, repository.dot.semver.version);
              }
              try{
                writeFileSync(resolve('.json'), JSON.stringify(repository.dot, null, 2));
                core.exportVariable('WORKFLOW_AUTO_UPDATE', true);
              }catch(e){
                core.setFailed(e);
              }
            }else{
              core.info('no new release found');
            }
            core.info(inspect(repository.dot, {showHidden:false, depth:null, colors:true}));
      - name: cron-update / checkout
        id: checkout
        if: env.WORKFLOW_AUTO_UPDATE == 'true'
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git add .json
          git commit -m "[upgrade] ${{ env.LATEST_VERSION }}"
          git push origin HEAD:master
      - name: cron-update / tag
        if: env.WORKFLOW_AUTO_UPDATE == 'true' && steps.checkout.outcome == 'success'
        run: |
          SHA256=$(git rev-list --branches --max-count=1)
          git tag -a v${{ env.WORKFLOW_NEW_TAG }} -m "v${{ env.WORKFLOW_NEW_TAG }}" ${SHA256}
          git push --follow-tags
      - name: cron-update / build docker image
        if: env.WORKFLOW_AUTO_UPDATE == 'true' && steps.checkout.outcome == 'success'
        uses: the-actions-org/workflow-dispatch@3133c5d135c7dbe4be4f9793872b6ef331b53bc7
        with:
          workflow: docker.yml
          wait-for-completion: false
          token: "${{ secrets.REPOSITORY_TOKEN }}"
          inputs: '{ "release":"true", "readme":"true" }'
          ref: "v${{ env.WORKFLOW_NEW_TAG }}"

View File

@@ -1,8 +1,26 @@
name: docker
run-name: ${{ inputs.run-name }}
on:
workflow_dispatch:
inputs:
run-name:
description: 'set run-name for workflow (multiple calls)'
type: string
required: false
default: 'docker'
runs-on:
description: 'set runs-on for workflow (github or selfhosted)'
type: string
required: false
default: 'ubuntu-22.04'
build:
description: 'set WORKFLOW_BUILD'
required: false
default: 'true'
release:
description: 'set WORKFLOW_GITHUB_RELEASE'
required: false
@@ -13,193 +31,395 @@ on:
required: false
default: 'false'
image:
description: 'set IMAGE'
required: false
uid:
description: 'set IMAGE_UID'
required: false
gid:
description: 'set IMAGE_GID'
required: false
semverprefix:
description: 'prefix for semver tags'
required: false
semversuffix:
description: 'suffix for semver tags'
etc:
description: 'base64 encoded json string'
required: false
jobs:
docker:
runs-on: ubuntu-22.04
runs-on: ${{ inputs.runs-on }}
timeout-minutes: 1440
services:
registry:
image: registry:2
ports:
- 5000:5000
permissions:
actions: read
contents: write
packages: write
security-events: write
steps:
- name: init / checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
ref: master
- name: init / inputs to env
if: github.event_name == 'workflow_dispatch'
run: |
cat << 'EOF' > .inputs
${{ toJSON(github.event.inputs) }}
EOF
for KEY in $(cat .inputs | jq --raw-output 'keys[]' | tr '\n' ' '); do echo "input_$(echo ${KEY} | tr '[:upper:]' '[:lower:]')=$(cat .inputs | jq --raw-output '.'${KEY}'')" >> $GITHUB_ENV; done
- name: init / .json to env
uses: rgarcia-phi/json-to-variables@9835d537368468c4e4de5254dc3efeadda183793
with:
filename: '.json'
ref: ${{ github.ref_name }}
fetch-depth: 0
- name: init / setup environment
run: |
: # set image
LOCAL_IMAGE=${json_image}
if [ ! -z ${input_image} ]; then LOCAL_IMAGE=${input_image}; fi
echo "IMAGE=${LOCAL_IMAGE}" >> $GITHUB_ENV
uses: actions/github-script@62c3794a3eb6788d9a2a72b219504732c0c9a298
with:
script: |
const { existsSync, readFileSync } = require('node:fs');
const { resolve } = require('node:path');
const { inspect } = require('node:util');
const { Buffer } = require('node:buffer');
const inputs = `${{ toJSON(github.event.inputs) }}`;
const opt = {input:{}, dot:{}};
: # set defaults
echo "IMAGE_ARCH=${json_arch:-linux/amd64,linux/arm64}" >> $GITHUB_ENV
echo "WORKFLOW_GITHUB_RELEASE=${input_release:-true}" >> $GITHUB_ENV;
echo "WORKFLOW_GITHUB_README=${input_readme:-true}" >> $GITHUB_ENV;
echo "WORKFLOW_GRYPE_SCAN=${json_grype_scan:-true}" >> $GITHUB_ENV;
echo "WORKFLOW_GRYPE_SEVERITY_CUTOFF=${json_grype_severity:-high}" >> $GITHUB_ENV;
try{
if(inputs.length > 0){
opt.input = JSON.parse(inputs);
if(opt.input?.etc){
opt.input.etc = JSON.parse(Buffer.from(opt.input.etc, 'base64').toString('ascii'));
}
}
}catch(e){
core.warning('could not parse github.event.inputs');
}
: # create tags for semver, stable and other shenanigans
LOCAL_SHA=$(git rev-parse --short HEAD)
LOCAL_SEMVER_MAJOR=$(awk -F. '{ print $1 }' <<< ${json_semver_version})
LOCAL_SEMVER_MINOR=$(awk -F. '{ print $2 }' <<< ${json_semver_version})
LOCAL_SEMVER_PATCH=$(awk -F. '{ print $3 }' <<< ${json_semver_version})
LOCAL_SEMVER_PREFIX=""
LOCAL_SEMVER_SUFFIX=""
LOCAL_SEMVER_RC=""
LOCAL_TAGS="${LOCAL_IMAGE}:${LOCAL_SHA}"
if [ ! -z ${input_semverprefix} ]; then LOCAL_SEMVER_PREFIX="${input_semverprefix}-"; fi
if [ ! -z ${input_semversuffix} ]; then LOCAL_SEMVER_SUFFIX="-${input_semversuffix}"; fi
if [ ! -z ${json_semver_rc} ]; then LOCAL_SEMVER_RC="${json_semver_rc}"; fi
if [ ! -z ${LOCAL_SEMVER_MAJOR} ]; then LOCAL_TAGS="${LOCAL_TAGS},${LOCAL_IMAGE}:${LOCAL_SEMVER_PREFIX}${LOCAL_SEMVER_MAJOR}${LOCAL_SEMVER_SUFFIX}"; fi
if [ ! -z ${LOCAL_SEMVER_MINOR} ]; then LOCAL_TAGS="${LOCAL_TAGS},${LOCAL_IMAGE}:${LOCAL_SEMVER_PREFIX}${LOCAL_SEMVER_MAJOR}.${LOCAL_SEMVER_MINOR}${LOCAL_SEMVER_SUFFIX}"; fi
if [ ! -z ${LOCAL_SEMVER_PATCH} ]; then LOCAL_TAGS="${LOCAL_TAGS},${LOCAL_IMAGE}:${LOCAL_SEMVER_PREFIX}${LOCAL_SEMVER_MAJOR}.${LOCAL_SEMVER_MINOR}.${LOCAL_SEMVER_PATCH}${LOCAL_SEMVER_SUFFIX}"; fi
if echo "${LOCAL_TAGS}" | grep -q "${json_semver_stable}" ; then LOCAL_TAGS="${LOCAL_TAGS},${LOCAL_IMAGE}:${LOCAL_SEMVER_PREFIX}stable${LOCAL_SEMVER_SUFFIX}"; fi
if echo "${LOCAL_TAGS}" | grep -q "${json_semver_latest}" ; then LOCAL_TAGS="${LOCAL_TAGS},${LOCAL_IMAGE}:${LOCAL_SEMVER_PREFIX}latest${LOCAL_SEMVER_SUFFIX}"; fi
if [ ! -z ${json_semver_tags} ]; then SPECIAL_LOCAL_TAGS=$(echo ${json_semver_tags} | sed 's/,/ /g'); for LOCAL_TAG in ${json_semver_tags}; do LOCAL_TAGS="${LOCAL_TAGS},${LOCAL_IMAGE}:${LOCAL_SEMVER_PREFIX}${LOCAL_TAG}${LOCAL_SEMVER_SUFFIX}"; done; fi
echo "IMAGE_TAGS=${LOCAL_TAGS}" >> $GITHUB_ENV
try{
const path = resolve('.json');
if(existsSync(path)){
try{
opt.dot = JSON.parse(readFileSync(path).toString());
}catch(e){
throw new Error('could not parse .json');
}
}else{
throw new Error('.json does not exist');
}
}catch(e){
core.setFailed(e);
}
: # if for whatever reason UID/GID must be changed at build time
if [ ! -z ${input_uid} ]; then echo "IMAGE_UID=${input_uid}" >> $GITHUB_ENV; else echo "IMAGE_UID=${json_uid:-1000}" >> $GITHUB_ENV; fi
if [ ! -z ${input_gid} ]; then echo "IMAGE_GID=${input_gid}" >> $GITHUB_ENV; else echo "IMAGE_GID=${json_gid:-1000}" >> $GITHUB_ENV; fi
core.info(inspect(opt, {showHidden:false, depth:null, colors:true}));
: # set rc, prefix or suffix globally for semver and version
echo "IMAGE_SEMVER_PREFIX=${LOCAL_SEMVER_PREFIX}" >> $GITHUB_ENV
echo "IMAGE_SEMVER_SUFFIX=${LOCAL_SEMVER_SUFFIX}" >> $GITHUB_ENV
echo "IMAGE_VERSION_RC=${LOCAL_SEMVER_RC}" >> $GITHUB_ENV
const docker = {
image:{
name:opt.dot.image,
arch:(opt.dot.arch || 'linux/amd64,linux/arm64'),
prefix:((opt.input?.etc?.semverprefix) ? `${opt.input?.etc?.semverprefix}-` : ''),
suffix:((opt.input?.etc?.semversuffix) ? `-${opt.input?.etc?.semversuffix}` : ''),
description:(opt.dot?.readme?.description || ''),
tags:[],
},
app:{
image:opt.dot.image,
name:opt.dot.name,
version:(opt.input?.etc?.version || opt.dot?.semver?.version),
root:opt.dot.root,
UID:(opt.input?.etc?.uid || 1000),
GID:(opt.input?.etc?.gid || 1000),
no_cache:new Date().getTime(),
},
cache:{
registry:'localhost:5000/',
},
tags:[],
};
docker.cache.name = `${docker.image.name}:${docker.image.prefix}buildcache${docker.image.suffix}`;
docker.cache.grype = `${docker.cache.registry}${docker.image.name}:${docker.image.prefix}grype${docker.image.suffix}`;
docker.app.prefix = docker.image.prefix;
docker.app.suffix = docker.image.suffix;
// setup tags
if(!opt.dot?.semver?.disable?.rolling){
docker.image.tags.push('rolling');
}
if(opt.input?.etc?.dockerfile !== 'arch.dockerfile' && opt.input?.etc?.tag){
docker.image.tags.push(`${context.sha.substring(0,7)}`);
docker.image.tags.push(opt.input.etc.tag);
docker.image.tags.push(`${opt.input.etc.tag}-${docker.app.version}`);
docker.cache.name = `${docker.image.name}:buildcache-${opt.input.etc.tag}`;
}else{
const semver = docker.app.version.split('.');
docker.image.tags.push(`${context.sha.substring(0,7)}`);
if(Array.isArray(semver)){
if(semver.length >= 1) docker.image.tags.push(`${semver[0]}`);
if(semver.length >= 2) docker.image.tags.push(`${semver[0]}.${semver[1]}`);
if(semver.length >= 3) docker.image.tags.push(`${semver[0]}.${semver[1]}.${semver[2]}`);
}
if(opt.dot?.semver?.stable && new RegExp(opt.dot?.semver.stable, 'ig').test(docker.image.tags.join(','))) docker.image.tags.push('stable');
if(opt.dot?.semver?.latest && new RegExp(opt.dot?.semver.latest, 'ig').test(docker.image.tags.join(','))) docker.image.tags.push('latest');
}
for(const tag of docker.image.tags){
docker.tags.push(`${docker.image.name}:${docker.image.prefix}${tag}${docker.image.suffix}`);
docker.tags.push(`ghcr.io/${docker.image.name}:${docker.image.prefix}${tag}${docker.image.suffix}`);
docker.tags.push(`quay.io/${docker.image.name}:${docker.image.prefix}${tag}${docker.image.suffix}`);
}
// setup build arguments
if(opt.input?.etc?.build?.args){
for(const arg in opt.input.etc.build.args){
docker.app[arg] = opt.input.etc.build.args[arg];
}
}
if(opt.dot?.build?.args){
for(const arg in opt.dot.build.args){
docker.app[arg] = opt.dot.build.args[arg];
}
}
const arguments = [];
for(const argument in docker.app){
arguments.push(`APP_${argument.toUpperCase()}=${docker.app[argument]}`);
}
// export to environment
core.exportVariable('DOCKER_CACHE_REGISTRY', docker.cache.registry);
core.exportVariable('DOCKER_CACHE_NAME', docker.cache.name);
core.exportVariable('DOCKER_CACHE_GRYPE', docker.cache.grype);
core.exportVariable('DOCKER_IMAGE_NAME', docker.image.name);
core.exportVariable('DOCKER_IMAGE_ARCH', docker.image.arch);
core.exportVariable('DOCKER_IMAGE_TAGS', docker.tags.join(','));
core.exportVariable('DOCKER_IMAGE_DESCRIPTION', docker.image.description);
core.exportVariable('DOCKER_IMAGE_ARGUMENTS', arguments.join("\r\n"));
core.exportVariable('DOCKER_IMAGE_DOCKERFILE', opt.input?.etc?.dockerfile || 'arch.dockerfile');
core.exportVariable('WORKFLOW_BUILD', (opt.input?.build === undefined) ? false : opt.input.build);
core.exportVariable('WORKFLOW_CREATE_RELEASE', (opt.input?.release === undefined) ? false : opt.input.release);
core.exportVariable('WORKFLOW_CREATE_README', (opt.input?.readme === undefined) ? false : opt.input.readme);
core.exportVariable('WORKFLOW_GRYPE_FAIL_ON_SEVERITY', (opt.dot?.grype?.fail === undefined) ? true : opt.dot.grype.fail);
core.exportVariable('WORKFLOW_GRYPE_SEVERITY_CUTOFF', (opt.dot?.grype?.severity || 'high'));
if(opt.dot?.readme?.comparison){
core.exportVariable('WORKFLOW_CREATE_COMPARISON', true);
core.exportVariable('WORKFLOW_CREATE_COMPARISON_FOREIGN_IMAGE', opt.dot.readme.comparison.image);
core.exportVariable('WORKFLOW_CREATE_COMPARISON_IMAGE', `${docker.image.name}:${docker.app.version}`);
}
# DOCKER
- name: docker / login to hub
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772
with:
username: 11notes
password: ${{ secrets.DOCKER_TOKEN }}
- name: github / login to ghcr
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772
with:
registry: ghcr.io
username: 11notes
password: ${{ secrets.GITHUB_TOKEN }}
- name: quay / login to quay
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772
with:
registry: quay.io
username: 11notes+github
password: ${{ secrets.QUAY_TOKEN }}
- name: docker / setup qemu
if: env.WORKFLOW_BUILD == 'true'
uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a
- name: docker / setup buildx
if: env.WORKFLOW_BUILD == 'true'
uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5
with:
driver-opts: network=host
- name: grype / build & push & tag
id: grype-tag
- name: docker / build & push & tag grype
if: env.WORKFLOW_BUILD == 'true'
id: docker-build
uses: docker/build-push-action@67a2d409c0a876cbe6b11854e3e25193efe4e62d
with:
context: .
file: arch.dockerfile
file: ${{ env.DOCKER_IMAGE_DOCKERFILE }}
push: true
platforms: ${{ env.IMAGE_ARCH }}
cache-from: type=registry,ref=${{ env.IMAGE }}:${{ env.IMAGE_SEMVER_PREFIX }}buildcache${{ env.IMAGE_SEMVER_SUFFIX }}
cache-to: type=registry,ref=${{ env.IMAGE }}:${{ env.IMAGE_SEMVER_PREFIX }}buildcache${{ env.IMAGE_SEMVER_SUFFIX }},mode=max,compression=zstd,force-compression=true
platforms: ${{ env.DOCKER_IMAGE_ARCH }}
cache-from: type=registry,ref=${{ env.DOCKER_CACHE_NAME }}
cache-to: type=registry,ref=${{ env.DOCKER_CACHE_REGISTRY }}${{ env.DOCKER_CACHE_NAME }},mode=max,compression=zstd,force-compression=true
build-args: |
APP_IMAGE=${{ env.IMAGE }}
APP_NAME=${{ env.json_name }}
APP_VERSION=${{ env.json_semver_version }}
APP_ROOT=${{ env.json_root }}
APP_UID=${{ env.IMAGE_UID }}
APP_GID=${{ env.IMAGE_GID }}
APP_VERSION_PREFIX=${{ env.IMAGE_SEMVER_PREFIX }}
APP_VERSION_SUFFIX=${{ env.IMAGE_SEMVER_SUFFIX }}
APP_VERSION_RC=${{ env.IMAGE_VERSION_RC }}
APP_NO_CACHE=$(date +%s)
${{ env.DOCKER_IMAGE_ARGUMENTS }}
tags: |
${{ env.IMAGE }}:${{ env.IMAGE_SEMVER_PREFIX }}grype${{ env.IMAGE_SEMVER_SUFFIX }}
${{ env.DOCKER_CACHE_GRYPE }}
- name: grype / scan
if: env.WORKFLOW_GRYPE_SCAN == 'true'
id: grype-scan
uses: anchore/scan-action@abae793926ec39a78ab18002bc7fc45bbbd94342
if: env.WORKFLOW_BUILD == 'true'
id: grype
uses: anchore/scan-action@dc6246fcaf83ae86fcc6010b9824c30d7320729e
with:
image: ${{ env.IMAGE }}:${{ env.IMAGE_SEMVER_PREFIX }}grype${{ env.IMAGE_SEMVER_SUFFIX }}
image: ${{ env.DOCKER_CACHE_GRYPE }}
fail-build: ${{ env.WORKFLOW_GRYPE_FAIL_ON_SEVERITY }}
severity-cutoff: ${{ env.WORKFLOW_GRYPE_SEVERITY_CUTOFF }}
by-cve: true
output-format: 'sarif'
by-cve: true
cache-db: true
- name: grype / delete tag
if: steps.grype-tag.outcome == 'success'
run: |
curl --request DELETE \
--url https://hub.docker.com/v2/repositories/${{ env.IMAGE }}/tags/${{ env.IMAGE_SEMVER_PREFIX }}grype${{ env.IMAGE_SEMVER_SUFFIX }}/ \
--header 'authorization: jwt ${{ secrets.DOCKER_TOKEN }}' \
--header 'content-type: application/json' \
--fail
- name: codeql / upload
id: codeql-upload
if: steps.grype-scan.outcome == 'success'
uses: github/codeql-action/upload-sarif@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169
- name: grype / fail
if: env.WORKFLOW_BUILD == 'true' && (failure() || steps.grype.outcome == 'failure')
uses: anchore/scan-action@dc6246fcaf83ae86fcc6010b9824c30d7320729e
with:
sarif_file: ${{ steps.grype-scan.outputs.sarif }}
wait-for-processing: false
category: grype
image: ${{ env.DOCKER_CACHE_GRYPE }}
fail-build: false
severity-cutoff: ${{ env.WORKFLOW_GRYPE_SEVERITY_CUTOFF }}
output-format: 'table'
by-cve: true
cache-db: true
- name: docker / build & push
if: env.WORKFLOW_BUILD == 'true'
uses: docker/build-push-action@67a2d409c0a876cbe6b11854e3e25193efe4e62d
with:
context: .
file: arch.dockerfile
file: ${{ env.DOCKER_IMAGE_DOCKERFILE }}
push: true
sbom: true
provenance: mode=max
platforms: ${{ env.IMAGE_ARCH }}
cache-from: type=registry,ref=${{ env.IMAGE }}:${{ env.IMAGE_SEMVER_PREFIX }}buildcache${{ env.IMAGE_SEMVER_SUFFIX }}
cache-to: type=registry,ref=${{ env.IMAGE }}:${{ env.IMAGE_SEMVER_PREFIX }}buildcache${{ env.IMAGE_SEMVER_SUFFIX }},mode=max,compression=zstd,force-compression=true
platforms: ${{ env.DOCKER_IMAGE_ARCH }}
cache-from: type=registry,ref=${{ env.DOCKER_CACHE_REGISTRY }}${{ env.DOCKER_CACHE_NAME }}
cache-to: type=registry,ref=${{ env.DOCKER_CACHE_NAME }},mode=max,compression=zstd,force-compression=true
build-args: |
APP_IMAGE=${{ env.IMAGE }}
APP_NAME=${{ env.json_name }}
APP_VERSION=${{ env.json_semver_version }}
APP_ROOT=${{ env.json_root }}
APP_UID=${{ env.IMAGE_UID }}
APP_GID=${{ env.IMAGE_GID }}
APP_VERSION_PREFIX=${{ env.IMAGE_SEMVER_PREFIX }}
APP_VERSION_SUFFIX=${{ env.IMAGE_SEMVER_SUFFIX }}
APP_VERSION_RC=${{ env.IMAGE_VERSION_RC }}
APP_NO_CACHE=$(date +%s)
${{ env.DOCKER_IMAGE_ARGUMENTS }}
tags: |
${{ env.IMAGE_TAGS }}
${{ env.DOCKER_IMAGE_TAGS }}
- name: github / create release notes
if: env.WORKFLOW_GITHUB_RELEASE == 'true' && hashFiles('RELEASE.md') != ''
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# RELEASE
- name: github / release / log
continue-on-error: true
id: git-log
run: |
gh release create ${{ github.ref_name }} -F RELEASE.md
LOCAL_LAST_TAG=$(git describe --abbrev=0 --tags `git rev-list --tags --skip=1 --max-count=1`)
echo "using last tag: ${LOCAL_LAST_TAG}"
LOCAL_COMMITS=$(git log ${LOCAL_LAST_TAG}..HEAD --oneline)
EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64)
echo "commits<<${EOF}" >> ${GITHUB_OUTPUT}
echo "${LOCAL_COMMITS}" >> ${GITHUB_OUTPUT}
echo "${EOF}" >> ${GITHUB_OUTPUT}
- name: github / release / markdown
if: env.WORKFLOW_CREATE_RELEASE == 'true' && steps.git-log.outcome == 'success'
id: git-release
uses: 11notes/action-docker-release@v1
# WHY IS THIS ACTION NOT SHA256 PINNED? SECURITY MUCH?!?!?!
# ---------------------------------------------------------------------------------
# the next step "github / release / create" creates a new release based on the code
# in the repo. This code is not modified and can't be modified by this action.
# It does create the markdown for the release, which could be abused, but to what
# extent? Adding a link to a malicious repo?
with:
git_log: ${{ steps.git-log.outputs.commits }}
- name: github / release / create
if: env.WORKFLOW_CREATE_RELEASE == 'true' && steps.git-release.outcome == 'success'
uses: actions/create-release@4c11c9fe1dcd9636620a16455165783b20fc7ea0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ github.ref }}
release_name: ${{ github.ref }}
body: ${{ steps.git-release.outputs.release }}
draft: false
prerelease: false
# LICENSE
- name: license / update year
continue-on-error: true
uses: actions/github-script@62c3794a3eb6788d9a2a72b219504732c0c9a298
with:
script: |
const { existsSync, readFileSync, writeFileSync } = require('node:fs');
const { resolve } = require('node:path');
const file = 'LICENSE';
const year = new Date().getFullYear();
try{
const path = resolve(file);
if(existsSync(path)){
let license = readFileSync(file).toString();
if(!new RegExp(`Copyright \\(c\\) ${year} 11notes`, 'i').test(license)){
license = license.replace(/Copyright \(c\) \d{4} /i, `Copyright (c) ${new Date().getFullYear()} `);
writeFileSync(path, license);
}
}else{
throw new Error(`file ${file} does not exist`);
}
}catch(e){
core.setFailed(e);
}
# README
- name: github / checkout HEAD
continue-on-error: true
run: |
git checkout HEAD
- name: docker / setup comparison images
if: env.WORKFLOW_CREATE_COMPARISON == 'true'
continue-on-error: true
run: |
docker image pull ${{ env.WORKFLOW_CREATE_COMPARISON_IMAGE }}
docker image ls --filter "reference=${{ env.WORKFLOW_CREATE_COMPARISON_IMAGE }}" --format json | jq --raw-output '.Size' &> ./comparison.size0.log
docker image pull ${{ env.WORKFLOW_CREATE_COMPARISON_FOREIGN_IMAGE }}
docker image ls --filter "reference=${{ env.WORKFLOW_CREATE_COMPARISON_FOREIGN_IMAGE }}" --format json | jq --raw-output '.Size' &> ./comparison.size1.log
docker run --entrypoint "/bin/sh" --rm ${{ env.WORKFLOW_CREATE_COMPARISON_FOREIGN_IMAGE }} -c id &> ./comparison.id.log
- name: github / create README.md
id: github-readme
continue-on-error: true
if: env.WORKFLOW_CREATE_README == 'true'
uses: 11notes/action-docker-readme@v1
# WHY IS THIS ACTION NOT SHA256 PINNED? SECURITY MUCH?!?!?!
# ---------------------------------------------------------------------------------
# the next step "github / commit & push" only adds the README and LICENSE as well as
# compose.yaml to the repository. This does not pose a security risk if this action
# were compromised. The code of the app can't be changed by this action, since
# only the files mentioned are committed to the repo. Sure, someone could make a bad
# compose.yaml, but since this serves only as an example I see no harm in that.
with:
sarif_file: ${{ steps.grype.outputs.sarif }}
build_output_metadata: ${{ steps.docker-build.outputs.metadata }}
- name: docker / push README.md to docker hub
continue-on-error: true
if: steps.github-readme.outcome == 'success' && hashFiles('README_NONGITHUB.md') != ''
uses: christian-korneck/update-container-description-action@d36005551adeaba9698d8d67a296bd16fa91f8e8
env:
DOCKER_USER: 11notes
DOCKER_PASS: ${{ secrets.DOCKER_TOKEN }}
with:
destination_container_repo: ${{ env.DOCKER_IMAGE_NAME }}
provider: dockerhub
short_description: ${{ env.DOCKER_IMAGE_DESCRIPTION }}
readme_file: 'README_NONGITHUB.md'
- name: github / commit & push
continue-on-error: true
if: steps.github-readme.outcome == 'success' && hashFiles('README.md') != ''
run: |
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add README.md
if [ -f compose.yaml ]; then
git add compose.yaml
fi
if [ -f LICENSE ]; then
git add LICENSE
fi
git commit -m "github-actions[bot]: update README.md"
git push origin HEAD:master
# REPOSITORY SETTINGS
- name: github / update description and set repo defaults
run: |
curl --request PATCH \
@@ -207,7 +427,7 @@ jobs:
--header 'authorization: Bearer ${{ secrets.REPOSITORY_TOKEN }}' \
--header 'content-type: application/json' \
--data '{
"description":"${{ env.json_readme_description }}",
"description":"${{ env.DOCKER_IMAGE_DESCRIPTION }}",
"homepage":"",
"has_issues":true,
"has_discussions":true,
@@ -215,31 +435,3 @@ jobs:
"has_wiki":false
}' \
--fail
- name: github / create README.md
if: env.WORKFLOW_GITHUB_README == 'true'
id: github-readme
uses: 11notes/action-docker-readme@v1
with:
sarif_file: ${{ steps.grype-scan.outputs.sarif }}
- name: github / commit & push
if: steps.github-readme.outcome == 'success'
run: |
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add .
git commit -m "update README.md"
git push
- name: docker / push README.md to docker hub
if: hashFiles('README.md') != ''
uses: christian-korneck/update-container-description-action@d36005551adeaba9698d8d67a296bd16fa91f8e8
env:
DOCKER_USER: 11notes
DOCKER_PASS: ${{ secrets.DOCKER_TOKEN }}
with:
destination_container_repo: ${{ env.IMAGE }}
provider: dockerhub
short_description: ${{ env.json_readme_description }}
readme_file: 'README.md'

16
.github/workflows/readme.yml vendored Normal file
View File

@@ -0,0 +1,16 @@
name: readme
on:
  workflow_dispatch:
jobs:
  readme:
    runs-on: ubuntu-latest
    steps:
      - name: update README.md
        uses: the-actions-org/workflow-dispatch@3133c5d135c7dbe4be4f9793872b6ef331b53bc7
        with:
          wait-for-completion: false
          workflow: docker.yml
          token: "${{ secrets.REPOSITORY_TOKEN }}"
          inputs: '{ "build":"false", "release":"false", "readme":"true" }'

View File

@@ -17,17 +17,47 @@ jobs:
docker-unraid:
runs-on: ubuntu-latest
steps:
- name: init / base64 nested json
uses: actions/github-script@62c3794a3eb6788d9a2a72b219504732c0c9a298
with:
script: |
const { Buffer } = require('node:buffer');
const etc = {
semversuffix:"unraid",
uid:99,
gid:100,
};
core.exportVariable('WORKFLOW_BASE64JSON', Buffer.from(JSON.stringify(etc)).toString('base64'));
- name: build docker image for unraid community
uses: the-actions-org/workflow-dispatch@3133c5d135c7dbe4be4f9793872b6ef331b53bc7
with:
workflow: docker.yml
token: "${{ secrets.REPOSITORY_TOKEN }}"
inputs: '{ "release":"false", "readme":"false", "uid":"99", "gid":"100", "semversuffix":"unraid" }'
inputs: '{ "release":"false", "readme":"false", "run-name":"unraid", "etc":"${{ env.WORKFLOW_BASE64JSON }}" }'
kms-gui:
runs-on: ubuntu-latest
needs: docker
steps:
- name: init / base64 nested json
uses: actions/github-script@62c3794a3eb6788d9a2a72b219504732c0c9a298
with:
script: |
const { Buffer } = require('node:buffer');
(async()=>{
try{
const master = await fetch('https://raw.githubusercontent.com/11notes/docker-kms/refs/heads/master/.json');
const dot = await master.json();
const etc = {
version:dot.semver.version,
};
core.exportVariable('WORKFLOW_BASE64JSON', Buffer.from(JSON.stringify(etc)).toString('base64'));
}catch(e){
core.setFailed(`workflow failed: ${e}`);
}
})();
- name: build downstream kms gui
uses: the-actions-org/workflow-dispatch@3133c5d135c7dbe4be4f9793872b6ef331b53bc7
with:
@@ -35,12 +65,33 @@ jobs:
token: "${{ secrets.REPOSITORY_TOKEN }}"
repo: 11notes/docker-kms-gui
ref: master
inputs: '{ "release":"false", "readme":"true" }'
inputs: '{ "release":"false", "readme":"true", "etc":"${{ env.WORKFLOW_BASE64JSON }}" }'
kms-gui-unraid:
runs-on: ubuntu-latest
needs: docker-unraid
steps:
- name: init / base64 nested json
uses: actions/github-script@62c3794a3eb6788d9a2a72b219504732c0c9a298
with:
script: |
const { Buffer } = require('node:buffer');
(async()=>{
try{
const master = await fetch('https://raw.githubusercontent.com/11notes/docker-kms/refs/heads/master/.json');
const dot = await master.json();
const etc = {
version:dot.semver.version,
semversuffix:"unraid",
uid:99,
gid:100,
};
core.exportVariable('WORKFLOW_BASE64JSON', Buffer.from(JSON.stringify(etc)).toString('base64'));
}catch(e){
core.setFailed(`workflow failed: ${e}`);
}
})();
- name: build downstream kms gui for unraid community
uses: the-actions-org/workflow-dispatch@3133c5d135c7dbe4be4f9793872b6ef331b53bc7
with:
@@ -48,4 +99,4 @@ jobs:
token: "${{ secrets.REPOSITORY_TOKEN }}"
repo: 11notes/docker-kms-gui
ref: master
inputs: '{ "release":"false", "readme":"false", "uid":"99", "gid":"100", "semversuffix":"unraid" }'
inputs: '{ "release":"false", "readme":"false", "run-name":"unraid", "etc":"${{ env.WORKFLOW_BASE64JSON }}" }'

2
.gitignore vendored
View File

@@ -1 +1,3 @@
# default
maintain/
node_modules/

View File

@@ -1,4 +0,0 @@
{
"readme": "true",
"release": "true"
}

7
.json
View File

@@ -2,11 +2,10 @@
"image":"11notes/kms",
"name":"kms",
"root":"/kms",
"arch":"linux/amd64,linux/arm64,linux/arm/v7",
"semver":{
"version":"465f4d1",
"stable":"465f4d1",
"latest":"465f4d1"
"version":"1.0.1"
},
"readme":{
@@ -15,7 +14,7 @@
"image":"11notes/alpine:stable"
},
"built":{
"py-kms":"https://github.com/Py-KMS-Organization/py-kms"
"11notes/py-kms":"https://github.com/11notes/fork-py-kms"
}
}
}

View File

@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2020 11notes
Copyright (c) 2025 11notes
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -1,26 +1,13 @@
![banner](https://github.com/11notes/defaults/blob/main/static/img/banner.png?raw=true)
# ⛰️ kms
[<img src="https://img.shields.io/badge/github-source-blue?logo=github&color=040308">](https://github.com/11notes/docker-kms)![size](https://img.shields.io/docker/image-size/11notes/kms/465f4d1?color=0eb305)![version](https://img.shields.io/docker/v/11notes/kms/465f4d1?color=eb7a09)![pulls](https://img.shields.io/docker/pulls/11notes/kms?color=2b75d6)[<img src="https://img.shields.io/github/issues/11notes/docker-kms?color=7842f5">](https://github.com/11notes/docker-kms/issues)
# KMS
[<img src="https://img.shields.io/badge/github-source-blue?logo=github&color=040308">](https://github.com/11notes/docker-KMS)![5px](https://github.com/11notes/defaults/blob/main/static/img/transparent5x2px.png?raw=true)![size](https://img.shields.io/docker/image-size/11notes/kms/1.0.1?color=0eb305)![5px](https://github.com/11notes/defaults/blob/main/static/img/transparent5x2px.png?raw=true)![version](https://img.shields.io/docker/v/11notes/kms/1.0.1?color=eb7a09)![5px](https://github.com/11notes/defaults/blob/main/static/img/transparent5x2px.png?raw=true)![pulls](https://img.shields.io/docker/pulls/11notes/kms?color=2b75d6)![5px](https://github.com/11notes/defaults/blob/main/static/img/transparent5x2px.png?raw=true)[<img src="https://img.shields.io/github/issues/11notes/docker-KMS?color=7842f5">](https://github.com/11notes/docker-KMS/issues)![5px](https://github.com/11notes/defaults/blob/main/static/img/transparent5x2px.png?raw=true)![swiss_made](https://img.shields.io/badge/Swiss_Made-FFFFFF?labelColor=FF0000&logo=data:image/svg%2bxml;base64,PHN2ZyB2ZXJzaW9uPSIxIiB3aWR0aD0iNTEyIiBoZWlnaHQ9IjUxMiIgdmlld0JveD0iMCAwIDMyIDMyIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciPjxwYXRoIGQ9Im0wIDBoMzJ2MzJoLTMyeiIgZmlsbD0iI2YwMCIvPjxwYXRoIGQ9Im0xMyA2aDZ2N2g3djZoLTd2N2gtNnYtN2gtN3YtNmg3eiIgZmlsbD0iI2ZmZiIvPjwvc3ZnPg==)
Activate any version of Windows and Office, forever
# MAIN TAGS 🏷️
These are the main tags for the image. There is also a tag for each commit and its shorthand sha256 value.
![Windows Server 2025](https://github.com/11notes/docker-KMS/blob/master/img/WindowsSRV2025.png?raw=true)
* [465f4d1](https://hub.docker.com/r/11notes/kms/tags?name=465f4d1)
* [stable](https://hub.docker.com/r/11notes/kms/tags?name=stable)
* [latest](https://hub.docker.com/r/11notes/kms/tags?name=latest)
* [465f4d1-unraid](https://hub.docker.com/r/11notes/kms/tags?name=465f4d1-unraid)
* [stable-unraid](https://hub.docker.com/r/11notes/kms/tags?name=stable-unraid)
* [latest-unraid](https://hub.docker.com/r/11notes/kms/tags?name=latest-unraid)
# UNRAID VERSION 🟠
This image supports unraid by default. Simply add **-unraid** to any tag and the image will run as 99:100 instead of 1000:1000 causing no issues on unraid. Enjoy.
![Windows Server 2025](https://github.com/11notes/docker-kms/blob/master/img/WindowsSRV2025.png?raw=true)
![Windows 11 LTSC 2024](https://github.com/11notes/docker-kms/blob/master/img/Windows11ENTLTSC.png?raw=true)
![Web GUI](https://github.com/11notes/docker-kms/blob/master/img/webGUICustomIcon.png?raw=true)
![Web GUI](https://github.com/11notes/docker-KMS/blob/master/img/webGUICustomIcon.png?raw=true)
# SYNOPSIS 📖
**What can I do with this?** This image will run a KMS server you can use to activate any version of Windows and Office, forever.
@@ -54,9 +41,8 @@ Works with:
```yaml
name: "kms"
services:
kms:
image: "11notes/kms:465f4d1"
container_name: "kms"
app:
image: "11notes/kms:1.0.1"
environment:
TZ: "Europe/Zurich"
volumes:
@@ -64,16 +50,21 @@ services:
ports:
- "1688:1688/tcp"
restart: "always"
kms-gui:
image: "11notes/kms-gui:stable"
container_name: "kms-gui"
gui:
image: "11notes/kms-gui:1.0.1"
depends_on:
app:
condition: "service_healthy"
restart: true
environment:
TZ: "Europe/Zurich"
volumes:
- "var:/kms/var"
ports:
- "8080:8080/tcp"
- "3000:3000/tcp"
restart: "always"
volumes:
var:
```
@@ -111,32 +102,50 @@ slmgr /ato
| `TZ` | [Time Zone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) | |
| `DEBUG` | Will activate debug option for container image and app (if available) | |
| `KMS_LOCALE` | see Microsoft LICD specification | 1033 (en-US) |
| `KMS_CLIENTCOUNT` | client count > 25 | 26 |
| `KMS_ACTIVATIONINTERVAL` | Retry unsuccessful after N minutes | 120 (2 hours) |
| `KMS_RENEWALINTERVAL` | re-activation after N minutes | 259200 (180 days) |
| `KMS_LOGLEVEL` | CRITICAL, ERROR, WARNING, INFO, DEBUG, MININFO | INFO |
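As a rough sketch (values are only illustrative, the defaults from the table above apply if a variable is omitted), these variables can be set via `environment` in the compose file:
```yaml
name: "kms"
services:
  app:
    image: "11notes/kms:1.0.1"
    environment:
      TZ: "Europe/Zurich"               # time zone for logs
      KMS_LOCALE: "1033"                # en-US, see Microsoft LICD specification
      KMS_ACTIVATIONINTERVAL: "120"     # retry unsuccessful activations after 2 hours
      KMS_RENEWALINTERVAL: "259200"     # re-activation after 180 days (in minutes)
      KMS_LOGLEVEL: "INFO"
    volumes:
      - "var:/kms/var"
    ports:
      - "1688:1688/tcp"
    restart: "always"
volumes:
  var:
```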
# MAIN TAGS 🏷️
These are the main tags for the image. There is also a tag for each commit, using its short commit SHA.
* [1.0.1](https://hub.docker.com/r/11notes/kms/tags?name=1.0.1)
* [1.0.1-unraid](https://hub.docker.com/r/11notes/kms/tags?name=1.0.1-unraid)
### There is no latest tag, what am I supposed to do about updates?
It is my opinion that the ```:latest``` tag is super dangerous. Many times I've introduced **breaking** changes to my images, which would have messed up everything for some people. If you don't want to change the tag to the latest [semver](https://semver.org/), simply use the short versions of [semver](https://semver.org/). Instead of using ```:1.0.1``` you can use ```:1``` or ```:1.0```. Since these tags are updated to the latest version of the software on each new release, using them is identical to using ```:latest```, but at least fixed to a major or minor version.
If you still insist on having the bleeding-edge release of this app, simply use the ```:rolling``` tag, but be warned! You will get the latest version of the app instantly, regardless of breaking changes, security issues or anything else. You do this at your own risk!
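For example, a minimal compose sketch pinning to a minor version (the tag scheme is the one described above):
```yaml
services:
  app:
    # follows every new 1.0.x patch release automatically, but never a new minor or major version
    image: "11notes/kms:1.0"
```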
# REGISTRIES ☁️
```
docker pull 11notes/kms:1.0.1
docker pull ghcr.io/11notes/kms:1.0.1
docker pull quay.io/11notes/kms:1.0.1
```
# UNRAID VERSION 🟠
This image supports unraid by default. Simply add **-unraid** to any tag and the image will run as 99:100 instead of 1000:1000, so it causes no issues on unraid. Enjoy.
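A minimal sketch in compose, just swapping the tag:
```yaml
services:
  app:
    # -unraid suffix: container runs as 99:100 instead of 1000:1000
    image: "11notes/kms:1.0.1-unraid"
```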
# SOURCE 💾
* [11notes/kms](https://github.com/11notes/docker-kms)
* [11notes/kms](https://github.com/11notes/docker-KMS)
# PARENT IMAGE 🏛️
* [11notes/alpine:stable](https://hub.docker.com/r/11notes/alpine)
# BUILT WITH 🧰
* [py-kms](https://github.com/Py-KMS-Organization/py-kms)
* [11notes/py-kms](https://github.com/11notes/fork-py-kms)
* [11notes/util](https://github.com/11notes/docker-util)
# GENERAL TIPS 📌
* Use a reverse proxy like Traefik, Nginx, HAproxy to terminate TLS and to protect your endpoints
* Use Lets Encrypt DNS-01 challenge to obtain valid SSL certificates for your services
> [!TIP]
>* Use a reverse proxy like Traefik, Nginx, HAProxy to terminate TLS and to protect your endpoints
>* Use Let's Encrypt DNS-01 challenge to obtain valid SSL certificates for your services
* Do not expose this image to WAN! You will get notified by Microsoft via your ISP to terminate the service if you do so
* [Microsoft LICD](https://learn.microsoft.com/en-us/openspecs/office_standards/ms-oe376/6c085406-a698-4e12-9d4d-c3b0ee3dbc4a)
* Use [11notes/kms-gui](https://github.com/11notes/docker-kms-gui) if you want to see the clients you activated in a nice web GUI
# SECURITY VULNERABILITIES REPORT ⚡
| Severity | Package | Version | Fix Version | Type | Location | Data Namespace | Link |
| --- | --- | --- | --- | --- | --- | --- | --- |
| 4.7 (Medium) | linux-pam | 1.6.1-r1 | | apk | /lib/apk/db/installed | nvd:cpe | [CVE-2024-10041](https://nvd.nist.gov/vuln/detail/CVE-2024-10041) |
# ElevenNotes™
This image is provided to you at your own risk. Always make backups before updating an image to a different version. Check the [releases](https://github.com/11notes/docker-kms/releases) for breaking changes. If you have any problems using this image, simply raise an [issue](https://github.com/11notes/docker-kms/issues), thanks. If you have a question or input, please create a new [discussion](https://github.com/11notes/docker-kms/discussions) instead of an issue. You can find all my other repositories on [github](https://github.com/11notes?tab=repositories).
*created 21.05.2025, 08:27:53 (CET)*

View File

@@ -1,2 +0,0 @@
### 🪄 Features
* add client IP to SQlite database

View File

@@ -1,15 +1,20 @@
ARG APP_UID=1000
ARG APP_GID=1000
ARG BUILD_ROOT=/git/fork-py-kms
# :: Util
FROM 11notes/util AS util
# :: Build / py-kms
FROM alpine/git AS build
ARG APP_VERSION
ARG BUILD_ROOT
RUN set -ex; \
git clone https://github.com/Py-KMS-Organization/py-kms.git -b next; \
cd /git/py-kms; \
git checkout ${APP_VERSION}; \
cp -R /git/py-kms/docker/docker-py3-kms-minimal/requirements.txt /git/py-kms/py-kms/requirements.txt; \
cp -R /git/py-kms/docker/docker-py3-kms/requirements.txt /git/py-kms/py-kms/requirements.gui.txt;
git clone https://github.com/11notes/fork-py-kms -b next; \
cd ${BUILD_ROOT}; \
git checkout v${APP_VERSION}; \
cp -R ${BUILD_ROOT}/docker/docker-py3-kms-minimal/requirements.txt ${BUILD_ROOT}/py-kms/requirements.txt; \
cp -R ${BUILD_ROOT}/docker/docker-py3-kms/requirements.txt ${BUILD_ROOT}/py-kms/requirements.gui.txt;
# :: Header
FROM 11notes/alpine:stable
@@ -22,6 +27,14 @@
ARG APP_ROOT
ARG APP_UID
ARG APP_GID
ARG APP_NO_CACHE
ARG BUILD_ROOT
# :: python image
ARG PIP_ROOT_USER_ACTION=ignore
ARG PIP_BREAK_SYSTEM_PACKAGES=1
ARG PIP_DISABLE_PIP_VERSION_CHECK=1
ARG PIP_NO_CACHE_DIR=1
# :: environment
ENV APP_IMAGE=${APP_IMAGE}
@@ -36,8 +49,8 @@
ENV KMS_LOGLEVEL="INFO"
# :: multi-stage
COPY --from=util /usr/local/bin/ /usr/local/bin
COPY --from=build /git/py-kms/py-kms/ /opt/py-kms
COPY --from=util /usr/local/bin /usr/local/bin
COPY --from=build ${BUILD_ROOT}/py-kms /opt/py-kms
# :: Run
USER root
@@ -52,15 +65,17 @@
RUN set -ex; \
mkdir -p ${APP_ROOT}/var; \
pip3 install --no-cache-dir -r /opt/py-kms/requirements.txt --break-system-packages; \
pip3 install --no-cache-dir pytz --break-system-packages; \
apk del --no-network .build;
pip3 install -r /opt/py-kms/requirements.txt; \
pip3 install pytz; \
pip3 list -o | sed 's/pip.*//' | grep . | cut -f1 -d' ' | tr " " "\n" | awk '{if(NR>=3)print}' | cut -d' ' -f1 | xargs -n1 pip3 install -U; \
apk del --no-network .build; \
rm -rf /usr/lib/python3.12/site-packages/pip;
# :: copy filesystem changes and set correct permissions
COPY ./rootfs /
RUN set -ex; \
chmod +x -R /usr/local/bin; \
chown -R 1000:1000 \
chown -R ${APP_UID}:${APP_GID} \
${APP_ROOT} \
/opt/py-kms;
@@ -75,4 +90,4 @@
HEALTHCHECK --interval=5s --timeout=2s CMD netstat -an | grep -q 1688 || exit 1
# :: Start
USER docker
USER ${APP_UID}:${APP_GID}

View File

@@ -1,8 +1,7 @@
name: "kms"
services:
kms:
image: "11notes/kms:465f4d1"
container_name: "kms"
app:
image: "11notes/kms:1.0.1"
environment:
TZ: "Europe/Zurich"
volumes:
@@ -10,15 +9,20 @@ services:
ports:
- "1688:1688/tcp"
restart: "always"
kms-gui:
image: "11notes/kms-gui:stable"
container_name: "kms-gui"
gui:
image: "11notes/kms-gui:1.0.1"
depends_on:
app:
condition: "service_healthy"
restart: true
environment:
TZ: "Europe/Zurich"
volumes:
- "var:/kms/var"
ports:
- "8080:8080/tcp"
- "3000:3000/tcp"
restart: "always"
volumes:
var:

View File

@@ -54,7 +54,6 @@ ${{ content_defaults }}
${{ content_environment }}
| `KMS_LOCALE` | see Microsoft LICD specification | 1033 (en-US) |
| `KMS_CLIENTCOUNT` | client count > 25 | 26 |
| `KMS_ACTIVATIONINTERVAL` | Retry unsuccessful after N minutes | 120 (2 hours) |
| `KMS_RENEWALINTERVAL` | re-activation after N minutes | 259200 (180 days) |
| `KMS_LOGLEVEL` | CRITICAL, ERROR, WARNING, INFO, DEBUG, MININFO | INFO |

File diff suppressed because it is too large

View File

@@ -1,268 +0,0 @@
#!/usr/bin/env python3
import binascii
import logging
import time
import uuid
from pykms_Structure import Structure
from pykms_DB2Dict import kmsDB2Dict
from pykms_PidGenerator import epidGenerator
from pykms_Filetimes import filetime_to_dt
from pykms_Sql import sql_update, sql_update_epid
from pykms_Format import justify, byterize, enco, deco, pretty_printer
#--------------------------------------------------------------------------------------------------------------------------------------------------------
loggersrv = logging.getLogger('logsrv')
class UUID(Structure):
commonHdr = ()
structure = (
('raw', '16s'),
)
def get(self):
return uuid.UUID(bytes_le = enco(str(self), 'latin-1'))
class kmsBase:
def __init__(self, data, srv_config):
self.data = data
self.srv_config = srv_config
class kmsRequestStruct(Structure):
commonHdr = ()
structure = (
('versionMinor', '<H'),
('versionMajor', '<H'),
('isClientVm', '<I'),
('licenseStatus', '<I'),
('graceTime', '<I'),
('applicationId', ':', UUID),
('skuId', ':', UUID),
('kmsCountedId' , ':', UUID),
('clientMachineId', ':', UUID),
('requiredClientCount', '<I'),
('requestTime', '<Q'),
('previousClientMachineId', ':', UUID),
('machineName', 'u'),
('_mnPad', '_-mnPad', '126-len(machineName)'),
('mnPad', ':'),
)
def getMachineName(self):
return self['machineName'].decode('utf-16le')
def getLicenseStatus(self):
return kmsBase.licenseStates[self['licenseStatus']] or "Unknown"
class kmsResponseStruct(Structure):
commonHdr = ()
structure = (
('versionMinor', '<H'),
('versionMajor', '<H'),
('epidLen', '<I=len(kmsEpid)+2'),
('kmsEpid', 'u'),
('clientMachineId', ':', UUID),
('responseTime', '<Q'),
('currentClientCount', '<I'),
('vLActivationInterval', '<I'),
('vLRenewalInterval', '<I'),
)
class GenericRequestHeader(Structure):
commonHdr = ()
structure = (
('bodyLength1', '<I'),
('bodyLength2', '<I'),
('versionMinor', '<H'),
('versionMajor', '<H'),
('remainder', '_'),
)
licenseStates = {
0 : "Unlicensed",
1 : "Activated",
2 : "Grace Period",
3 : "Out-of-Tolerance Grace Period",
4 : "Non-Genuine Grace Period",
5 : "Notifications Mode",
6 : "Extended Grace Period",
}
licenseStatesEnum = {
'unlicensed' : 0,
'licensed' : 1,
'oobGrace' : 2,
'ootGrace' : 3,
'nonGenuineGrace' : 4,
'notification' : 5,
'extendedGrace' : 6
}
def getPadding(self, bodyLength):
## https://forums.mydigitallife.info/threads/71213-Source-C-KMS-Server-from-Microsoft-Toolkit?p=1277542&viewfull=1#post1277542
return 4 + (((~bodyLength & 3) + 1) & 3)
def serverLogic(self, kmsRequest):
pretty_printer(num_text = 15, where = "srv")
kmsRequest = byterize(kmsRequest)
loggersrv.debug("KMS Request Bytes: \n%s\n" % justify(deco(binascii.b2a_hex(enco(str(kmsRequest), 'latin-1')), 'latin-1')))
loggersrv.debug("KMS Request: \n%s\n" % justify(kmsRequest.dump(print_to_stdout = False)))
clientMachineId = kmsRequest['clientMachineId'].get()
applicationId = kmsRequest['applicationId'].get()
skuId = kmsRequest['skuId'].get()
requestDatetime = filetime_to_dt(kmsRequest['requestTime'])
# Localize the request time, if module "tzlocal" is available.
try:
from datetime import datetime
from tzlocal import get_localzone
from pytz.exceptions import UnknownTimeZoneError
try:
local_dt = datetime.fromisoformat(str(requestDatetime)).astimezone(get_localzone())
except UnknownTimeZoneError:
pretty_printer(log_obj = loggersrv.warning,
put_text = "{reverse}{yellow}{bold}Unknown time zone ! Request time not localized.{end}")
local_dt = requestDatetime
except ImportError:
pretty_printer(log_obj = loggersrv.warning,
put_text = "{reverse}{yellow}{bold}Module 'tzlocal' or 'pytz' not available ! Request time not localized.{end}")
local_dt = requestDatetime
except Exception as e:
# Just in case something else goes wrong
loggersrv.warning('Okay, something went horribly wrong while localizing the request time (proceeding anyways): ' + str(e))
local_dt = requestDatetime
pass
# Activation threshold.
# https://docs.microsoft.com/en-us/windows/deployment/volume-activation/activate-windows-10-clients-vamt
MinClients = kmsRequest['requiredClientCount']
RequiredClients = MinClients * 2
if self.srv_config["clientcount"] != None:
if 0 < self.srv_config["clientcount"] < MinClients:
# fixed to 6 (product server) or 26 (product desktop)
currentClientCount = MinClients + 1
pretty_printer(log_obj = loggersrv.warning,
put_text = "{reverse}{yellow}{bold}Not enough clients ! Fixed with %s, but activated client \
could be detected as not genuine !{end}" %currentClientCount)
elif MinClients <= self.srv_config["clientcount"] < RequiredClients:
currentClientCount = self.srv_config["clientcount"]
pretty_printer(log_obj = loggersrv.warning,
put_text = "{reverse}{yellow}{bold}With count = %s, activated client could be detected as not genuine !{end}" %currentClientCount)
elif self.srv_config["clientcount"] >= RequiredClients:
# fixed to 10 (product server) or 50 (product desktop)
currentClientCount = RequiredClients
if self.srv_config["clientcount"] > RequiredClients:
pretty_printer(log_obj = loggersrv.warning,
put_text = "{reverse}{yellow}{bold}Too many clients ! Fixed with %s{end}" %currentClientCount)
else:
# fixed to 10 (product server) or 50 (product desktop)
currentClientCount = RequiredClients
# Get a name for SkuId, AppId.
kmsdb = kmsDB2Dict()
appName, skuName = str(applicationId), str(skuId)
appitems = kmsdb[2]
for appitem in appitems:
kmsitems = appitem['KmsItems']
for kmsitem in kmsitems:
skuitems = kmsitem['SkuItems']
for skuitem in skuitems:
try:
if uuid.UUID(skuitem['Id']) == skuId:
skuName = skuitem['DisplayName']
break
except:
skuName = skuId
pretty_printer(log_obj = loggersrv.warning,
put_text = "{reverse}{yellow}{bold}Can't find a name for this product !{end}")
try:
if uuid.UUID(appitem['Id']) == applicationId:
appName = appitem['DisplayName']
except:
appName = applicationId
pretty_printer(log_obj = loggersrv.warning,
put_text = "{reverse}{yellow}{bold}Can't find a name for this application group !{end}")
infoDict = {
"machineName" : kmsRequest.getMachineName(),
"clientMachineId" : str(clientMachineId),
"appId" : appName,
"skuId" : skuName,
"licenseStatus" : kmsRequest.getLicenseStatus(),
"requestTime" : int(time.time()),
"kmsEpid" : None,
"machineIp" : self.srv_config['raddr']
}
loggersrv.info("Machine Name: %s" % infoDict["machineName"])
loggersrv.info("Machine IP: %s" % infoDict["machineIp"])
loggersrv.info("Client Machine ID: %s" % infoDict["clientMachineId"])
loggersrv.info("Application ID: %s" % infoDict["appId"])
loggersrv.info("SKU ID: %s" % infoDict["skuId"])
loggersrv.info("License Status: %s" % infoDict["licenseStatus"])
loggersrv.info("Request Time: %s" % local_dt.strftime('%Y-%m-%d %H:%M:%S %Z (UTC%z)'))
if self.srv_config['loglevel'] == 'MININFO':
loggersrv.mininfo("", extra = {'host': self.srv_config['raddr'],
'status' : infoDict["licenseStatus"],
'product' : infoDict["skuId"]})
# Create database.
if self.srv_config['sqlite']:
sql_update(self.srv_config['sqlite'], infoDict)
return self.createKmsResponse(kmsRequest, currentClientCount, appName)
def createKmsResponse(self, kmsRequest, currentClientCount, appName):
response = self.kmsResponseStruct()
response['versionMinor'] = kmsRequest['versionMinor']
response['versionMajor'] = kmsRequest['versionMajor']
if not self.srv_config["epid"]:
response["kmsEpid"] = epidGenerator(kmsRequest['kmsCountedId'].get(), kmsRequest['versionMajor'],
self.srv_config["lcid"]).encode('utf-16le')
else:
response["kmsEpid"] = self.srv_config["epid"].encode('utf-16le')
response['clientMachineId'] = kmsRequest['clientMachineId']
# rule: timeserver - 4h <= timeclient <= timeserver + 4h, check if is satisfied (TODO).
response['responseTime'] = kmsRequest['requestTime']
response['currentClientCount'] = currentClientCount
response['vLActivationInterval'] = self.srv_config["activation"]
response['vLRenewalInterval'] = self.srv_config["renewal"]
# Update database epid.
if self.srv_config['sqlite']:
sql_update_epid(self.srv_config['sqlite'], kmsRequest, response, appName)
loggersrv.info("Server ePID: %s" % response["kmsEpid"].decode('utf-16le'))
return response
import pykms_RequestV4, pykms_RequestV5, pykms_RequestV6, pykms_RequestUnknown
def generateKmsResponseData(data, srv_config):
version = kmsBase.GenericRequestHeader(data)['versionMajor']
currentDate = time.strftime("%a %b %d %H:%M:%S %Y")
if version == 4:
loggersrv.info("Received V%d request on %s." % (version, currentDate))
messagehandler = pykms_RequestV4.kmsRequestV4(data, srv_config)
elif version == 5:
loggersrv.info("Received V%d request on %s." % (version, currentDate))
messagehandler = pykms_RequestV5.kmsRequestV5(data, srv_config)
elif version == 6:
loggersrv.info("Received V%d request on %s." % (version, currentDate))
messagehandler = pykms_RequestV6.kmsRequestV6(data, srv_config)
else:
loggersrv.info("Unhandled KMS version V%d." % version)
messagehandler = pykms_RequestUnknown.kmsRequestUnknown(data, srv_config)
return messagehandler.executeRequestLogic()

View File

@@ -1,545 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import binascii
import re
import sys
import socket
import uuid
import logging
import os
import threading
import socketserver
import queue as Queue
import selectors
from time import monotonic as time
import pykms_RpcBind, pykms_RpcRequest
from pykms_RpcBase import rpcBase
from pykms_Dcerpc import MSRPCHeader
from pykms_Misc import check_setup, check_lcid, check_other
from pykms_Misc import KmsParser, KmsParserException, KmsParserHelp
from pykms_Misc import kms_parser_get, kms_parser_check_optionals, kms_parser_check_positionals, kms_parser_check_connect
from pykms_Format import enco, deco, pretty_printer, justify
from pykms_Connect import MultipleListener
from pykms_Sql import sql_initialize
srv_version = "py-kms_2020-10-01"
__license__ = "The Unlicense"
__author__ = u"Matteo an <SystemRage@protonmail.com>"
__url__ = "https://github.com/SystemRage/py-kms"
srv_description = "py-kms: KMS Server Emulator written in Python"
srv_config = {}
##---------------------------------------------------------------------------------------------------------------------------------------------------------
class KeyServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
daemon_threads = True
def __init__(self, server_address, RequestHandlerClass, bind_and_activate = True, want_dual = False):
socketserver.BaseServer.__init__(self, server_address, RequestHandlerClass)
self.__shutdown_request = False
self.r_service, self.w_service = socket.socketpair()
if hasattr(selectors, 'PollSelector'):
self._ServerSelector = selectors.PollSelector
else:
self._ServerSelector = selectors.SelectSelector
if bind_and_activate:
try:
self.multisock = MultipleListener(server_address, want_dual = want_dual)
except Exception as e:
if want_dual and str(e) == "dualstack_ipv6 not supported on this platform":
try:
pretty_printer(log_obj = loggersrv.warning,
put_text = "{reverse}{yellow}{bold}%s. Creating not dualstack sockets...{end}" %str(e))
self.multisock = MultipleListener(server_address, want_dual = False)
except Exception as e:
pretty_printer(log_obj = loggersrv.error, to_exit = True,
put_text = "{reverse}{red}{bold}%s. Exiting...{end}" %str(e))
else:
pretty_printer(log_obj = loggersrv.error, to_exit = True,
put_text = "{reverse}{red}{bold}%s. Exiting...{end}" %str(e))
if self.multisock.cant_dual:
delim = ('' if len(self.multisock.cant_dual) == 1 else ', ')
pretty_printer(log_obj = loggersrv.warning,
put_text = "{reverse}{yellow}{bold}IPv4 [%s] can't be dualstack{end}" %delim.join(self.multisock.cant_dual))
def pykms_serve(self):
""" Mixing of socketserver serve_forever() and handle_request() functions,
without elements blocking tkinter.
Handle one request at a time, possibly blocking.
Respects self.timeout.
"""
# Support people who used socket.settimeout() to escape
# pykms_serve() before self.timeout was available.
timeout = self.multisock.gettimeout()
if timeout is None:
timeout = self.timeout
elif self.timeout is not None:
timeout = min(timeout, self.timeout)
if timeout is not None:
deadline = time() + timeout
try:
# Wait until a request arrives or the timeout expires.
with self._ServerSelector() as selector:
self.multisock.register(selector)
# self-pipe trick.
selector.register(fileobj = self.r_service.fileno(), events = selectors.EVENT_READ)
while not self.__shutdown_request:
ready = selector.select(timeout)
if self.__shutdown_request:
break
if ready == []:
if timeout is not None:
timeout = deadline - time()
if timeout < 0:
return self.handle_timeout()
else:
for key, mask in ready:
if key.fileobj in self.multisock.filenos():
self.socket = self.multisock.sockmap[key.fileobj]
self.server_address = self.socket.getsockname()
self._handle_request_noblock()
elif key.fileobj is self.r_service.fileno():
# only to clean buffer.
msgkill = os.read(self.r_service.fileno(), 8).decode('utf-8')
sys.exit(0)
finally:
self.__shutdown_request = False
def shutdown(self):
self.__shutdown_request = True
def server_close(self):
self.multisock.close()
def handle_timeout(self):
pretty_printer(log_obj = loggersrv.error, to_exit = True,
put_text = "{reverse}{red}{bold}Server connection timed out. Exiting...{end}")
def handle_error(self, request, client_address):
pass
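# --- Illustrative sketch, not py-kms code: the self-pipe trick used by KeyServer above. ---
# r_service / w_service form a socket pair; terminate_eject() (further down) writes a wake-up
# marker into w_service, which wakes the selector inside pykms_serve() so the loop can exit
# without waiting for a timeout. A minimal standalone version of the same pattern follows
# (os, socket, selectors and threading are already imported at the top of this module).
def demo_self_pipe_wakeup():
    r_wake, w_wake = socket.socketpair()                   # same roles as r_service / w_service
    listener = socket.socket()
    listener.bind(("127.0.0.1", 0))
    listener.listen(1)
    threading.Timer(0.2, lambda: w_wake.send(b"\x00")).start()   # stand-in for terminate_eject()
    with selectors.DefaultSelector() as sel:
        sel.register(listener, selectors.EVENT_READ)
        sel.register(r_wake, selectors.EVENT_READ)
        for key, _ in sel.select():                        # blocks until a client or the wake-up byte
            if key.fileobj is r_wake:
                os.read(r_wake.fileno(), 8)                # drain the byte, as pykms_serve() does
    for s in (listener, r_wake, w_wake):
        s.close()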
class server_thread(threading.Thread):
def __init__(self, queue, name):
threading.Thread.__init__(self)
self.name = name
self.queue = queue
self.server = None
self.is_running_server = False
self.checked = False
self.is_running_thread = threading.Event()
def terminate_serve(self):
self.server.shutdown()
self.server.server_close()
self.server = None
self.is_running_server = False
def terminate_thread(self):
self.is_running_thread.set()
def terminate_eject(self):
os.write(self.server.w_service.fileno(), u'☠'.encode('utf-8'))
def run(self):
while not self.is_running_thread.is_set():
try:
item = self.queue.get(block = True, timeout = 0.1)
self.queue.task_done()
except Queue.Empty:
continue
else:
try:
if item == 'start':
self.eject = False
self.is_running_server = True
# Check options.
if not self.checked:
server_check()
# Create and run server.
self.server = server_create()
self.server.pykms_serve()
except (SystemExit, Exception) as e:
self.eject = True
raise
##---------------------------------------------------------------------------------------------------------------------------------------------------------
loggersrv = logging.getLogger('logsrv')
def _str2bool(v):
if isinstance(v, bool):
return v
if v.lower() in ('yes', 'true', 't', 'y', '1'):
return True
elif v.lower() in ('no', 'false', 'f', 'n', '0'):
return False
else:
raise ValueError('Boolean value expected.')
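# --- Illustrative sketch, not py-kms code: _str2bool as an argparse converter. ---
# The connect sub-parser below passes _str2bool as the type= of -d/--dual, so values such as
# "yes"/"no" or "1"/"0" on the command line become real booleans. Minimal usage example
# (argparse is used here directly; py-kms itself goes through its KmsParser wrapper):
def _str2bool_example():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--dual", type = _str2bool, default = True,
                        help = "accepts yes/no, true/false, t/f, y/n, 1/0")
    return parser.parse_args(["--dual", "no"]).dual        # -> False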
# 'help' string - 'default' value - 'dest' string.
srv_options = {
'ip' : {'help' : 'The IP address (IPv4 or IPv6) to listen on. The default is \"::\" (all interfaces).', 'def' : "::", 'des' : "ip"},
'port' : {'help' : 'The network port to listen on. The default is \"1688\".', 'def' : 1688, 'des' : "port"},
'epid' : {'help' : 'Use this option to manually specify an ePID to use. If no ePID is specified, a random ePID will be auto generated.',
'def' : None, 'des' : "epid"},
'lcid' : {'help' : 'Use this option to manually specify an LCID for use with randomly generated ePIDs. Default is \"1033\" (en-us)',
'def' : 1033, 'des' : "lcid"},
'count' : {'help' : 'Use this option to specify the current client count. A number >=25 is required to enable activation of client OSes; \
for server OSes and Office, >=5 is sufficient.', 'def' : None, 'des' : "clientcount"},
'activation' : {'help' : 'Use this option to specify the activation interval (in minutes). Default is \"120\" minutes (2 hours).',
'def' : 120, 'des': "activation"},
'renewal' : {'help' : 'Use this option to specify the renewal interval (in minutes). Default is \"10080\" minutes (7 days).',
'def' : 1440 * 7, 'des' : "renewal"},
'sql' : {'help' : 'Use this option to store request information from unique clients in an SQLite database. Deactivated by default.', 'def' : False,
'file': os.path.join('.', 'pykms_database.db'), 'des' : "sqlite"},
'hwid' : {'help' : 'Use this option to specify a HWID. The HWID must be an 16-character string of hex characters. \
Type \"RANDOM\" to auto-generate the HWID.',
'def' : "RANDOM", 'des' : "hwid"},
'time0' : {'help' : 'Maximum inactivity time (in seconds) after which the connection with the client is closed. If \"None\" (default) serve forever.',
'def' : None, 'des' : "timeoutidle"},
'time1' : {'help' : 'Set the maximum time to wait for sending / receiving a request / response. Default is no timeout.',
'def' : None, 'des' : "timeoutsndrcv"},
'asyncmsg' : {'help' : 'Prints pretty / logging messages asynchronously. Deactivated by default.',
'def' : False, 'des' : "asyncmsg"},
'llevel' : {'help' : 'Use this option to set a log level. The default is \"WARNING\".', 'def' : "WARNING", 'des' : "loglevel",
'choi' : ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "MININFO"]},
'lfile' : {'help' : 'Use this option to set an output log file. The default is \"pykms_logserver.log\". \
Type \"STDOUT\" to view log info on stdout. Type \"FILESTDOUT\" to combine previous actions. \
Use \"STDOUTOFF\" to disable stdout messages. Use \"FILEOFF\" if you not want to create logfile.',
'def' : os.path.join('.', 'pykms_logserver.log'), 'des' : "logfile"},
'lsize' : {'help' : 'Use this option to set a maximum size (in MB) for the output log file. Deactivated by default.', 'def' : 0, 'des': "logsize"},
'listen' : {'help' : 'Adds one or more additional listening IP address / port pairs.', 'des': "listen"},
'backlog' : {'help' : 'Specifies the maximum length of the queue of pending connections. Default is \"5\".', 'def' : 5, 'des': "backlog"},
'reuse' : {'help' : 'Do not allow binding / listening to the same address and port. Port reuse is enabled by default.', 'def' : True,
'des': "reuse"},
'dual' : {'help' : 'Allows listening to an IPv6 address while also accepting connections via IPv4. If used, it refers to all addresses (main and additional). Activated by default. Pass in "false" or "true" to disable or enable.',
'def' : True, 'des': "dual"}
}
def server_options():
server_parser = KmsParser(description = srv_description, epilog = 'version: ' + srv_version, add_help = False)
server_parser.add_argument("ip", nargs = "?", action = "store", default = srv_options['ip']['def'], help = srv_options['ip']['help'], type = str)
server_parser.add_argument("port", nargs = "?", action = "store", default = srv_options['port']['def'], help = srv_options['port']['help'], type = int)
server_parser.add_argument("-e", "--epid", action = "store", dest = srv_options['epid']['des'], default = srv_options['epid']['def'],
help = srv_options['epid']['help'], type = str)
server_parser.add_argument("-l", "--lcid", action = "store", dest = srv_options['lcid']['des'], default = srv_options['lcid']['def'],
help = srv_options['lcid']['help'], type = int)
server_parser.add_argument("-c", "--client-count", action = "store", dest = srv_options['count']['des'] , default = srv_options['count']['def'],
help = srv_options['count']['help'], type = str)
server_parser.add_argument("-a", "--activation-interval", action = "store", dest = srv_options['activation']['des'],
default = srv_options['activation']['def'], help = srv_options['activation']['help'], type = int)
server_parser.add_argument("-r", "--renewal-interval", action = "store", dest = srv_options['renewal']['des'],
default = srv_options['renewal']['def'], help = srv_options['renewal']['help'], type = int)
server_parser.add_argument("-s", "--sqlite", nargs = "?", dest = srv_options['sql']['des'], const = True,
default = srv_options['sql']['def'], help = srv_options['sql']['help'], type = str)
server_parser.add_argument("-w", "--hwid", action = "store", dest = srv_options['hwid']['des'], default = srv_options['hwid']['def'],
help = srv_options['hwid']['help'], type = str)
server_parser.add_argument("-t0", "--timeout-idle", action = "store", dest = srv_options['time0']['des'], default = srv_options['time0']['def'],
help = srv_options['time0']['help'], type = str)
server_parser.add_argument("-t1", "--timeout-sndrcv", action = "store", dest = srv_options['time1']['des'], default = srv_options['time1']['def'],
help = srv_options['time1']['help'], type = str)
server_parser.add_argument("-y", "--async-msg", action = "store_true", dest = srv_options['asyncmsg']['des'],
default = srv_options['asyncmsg']['def'], help = srv_options['asyncmsg']['help'])
server_parser.add_argument("-V", "--loglevel", action = "store", dest = srv_options['llevel']['des'], choices = srv_options['llevel']['choi'],
default = srv_options['llevel']['def'], help = srv_options['llevel']['help'], type = str)
server_parser.add_argument("-F", "--logfile", nargs = "+", action = "store", dest = srv_options['lfile']['des'],
default = srv_options['lfile']['def'], help = srv_options['lfile']['help'], type = str)
server_parser.add_argument("-S", "--logsize", action = "store", dest = srv_options['lsize']['des'], default = srv_options['lsize']['def'],
help = srv_options['lsize']['help'], type = float)
server_parser.add_argument("-h", "--help", action = "help", help = "show this help message and exit")
## Connection parsing.
connection_parser = KmsParser(description = "connect options", add_help = False)
connection_subparser = connection_parser.add_subparsers(dest = "mode")
connect_parser = connection_subparser.add_parser("connect", add_help = False)
connect_parser.add_argument("-n", "--listen", action = "append", dest = srv_options['listen']['des'], default = [],
help = srv_options['listen']['help'], type = str)
connect_parser.add_argument("-b", "--backlog", action = "append", dest = srv_options['backlog']['des'], default = [],
help = srv_options['backlog']['help'], type = int)
connect_parser.add_argument("-u", "--no-reuse", action = "append_const", dest = srv_options['reuse']['des'], const = False, default = [],
help = srv_options['reuse']['help'])
connect_parser.add_argument("-d", "--dual", type = _str2bool, dest = srv_options['dual']['des'], default = srv_options['dual']['def'],
help = srv_options['dual']['help'])
try:
userarg = sys.argv[1:]
# Run help.
if any(arg in ["-h", "--help"] for arg in userarg):
KmsParserHelp().printer(parsers = [server_parser, (connection_parser, connect_parser)])
# Get stored arguments.
pykmssrv_zeroarg, pykmssrv_onearg = kms_parser_get(server_parser)
connect_zeroarg, connect_onearg = kms_parser_get(connect_parser)
subdict = {
'connect' : (connect_zeroarg, connect_onearg, connection_parser.parse_args)
}
subpars = list(subdict.keys())
pykmssrv_zeroarg += subpars # add subparsers
exclude_kms = ['-F', '--logfile']
exclude_dup = ['-n', '--listen', '-b', '--backlog', '-u', '--no-reuse']
# Set defaults for server dict config.
# example case:
# python3 pykms_Server.py
srv_config.update(vars(server_parser.parse_args([])))
subindx = sorted([(userarg.index(pars), pars) for pars in subpars if pars in userarg], key = lambda x: x[0])
if subindx:
# Set `daemon options` and/or `connect options` for server dict config.
# example cases:
# 1 python3 pykms_Server.py [1.2.3.4] [1234] [--pykms_optionals] connect [--connect_optionals]
first = subindx[0][0]
# initial.
kms_parser_check_optionals(userarg[0 : first], pykmssrv_zeroarg, pykmssrv_onearg, exclude_opt_len = exclude_kms)
kms_parser_check_positionals(srv_config, server_parser.parse_args, arguments = userarg[0 : first], force_parse = True)
# middle.
for i in range(len(subindx) - 1):
posi, posf, typ = subindx[i][0], subindx[i + 1][0], subindx[i][1]
kms_parser_check_optionals(userarg[posi : posf], subdict[typ][0], subdict[typ][1], msg = 'optional %s' %typ,
exclude_opt_dup = (exclude_dup if typ == 'connect' else []))
kms_parser_check_positionals(srv_config, subdict[typ][2], arguments = userarg[posi : posf], msg = 'positional %s' %typ)
# final.
pos, typ = subindx[-1]
kms_parser_check_optionals(userarg[pos:], subdict[typ][0], subdict[typ][1], msg = 'optional %s' %typ,
exclude_opt_dup = (exclude_dup if typ == 'connect' else []))
kms_parser_check_positionals(srv_config, subdict[typ][2], arguments = userarg[pos:], msg = 'positional %s' %typ)
if len(subindx) > 1:
srv_config['mode'] = '+'.join(elem[1] for elem in subindx)
else:
# Update `pykms options` for server dict config.
# example case:
# 2 python3 pykms_Server.py [1.2.3.4] [1234] [--pykms_optionals]
kms_parser_check_optionals(userarg, pykmssrv_zeroarg, pykmssrv_onearg, exclude_opt_len = exclude_kms)
kms_parser_check_positionals(srv_config, server_parser.parse_args)
kms_parser_check_connect(srv_config, srv_options, userarg, connect_zeroarg, connect_onearg)
except KmsParserException as e:
pretty_printer(put_text = "{reverse}{red}{bold}%s. Exiting...{end}" %str(e), to_exit = True)
def server_check():
# Setup and some checks.
check_setup(srv_config, srv_options, loggersrv, where = "srv")
# Random HWID.
if srv_config['hwid'] == "RANDOM":
randomhwid = uuid.uuid4().hex
srv_config['hwid'] = randomhwid[:16]
# Sanitize HWID.
hexstr = srv_config['hwid']
# Strip 0x from the start of hexstr
if hexstr.startswith("0x"):
hexstr = hexstr[2:]
hexsub = re.sub(r'[^0-9a-fA-F]', '', hexstr)
diff = set(hexstr).symmetric_difference(set(hexsub))
if len(diff) != 0:
diff = str(diff).replace('{', '').replace('}', '')
pretty_printer(log_obj = loggersrv.error, to_exit = True,
put_text = "{reverse}{red}{bold}HWID '%s' is invalid. Digit %s non hexadecimal. Exiting...{end}" %(hexstr.upper(), diff))
else:
lh = len(hexsub)
if lh % 2 != 0:
pretty_printer(log_obj = loggersrv.error, to_exit = True,
put_text = "{reverse}{red}{bold}HWID '%s' is invalid. Hex string is odd length. Exiting...{end}" %hexsub.upper())
elif lh < 16:
pretty_printer(log_obj = loggersrv.error, to_exit = True,
put_text = "{reverse}{red}{bold}HWID '%s' is invalid. Hex string is too short. Exiting...{end}" %hexsub.upper())
elif lh > 16:
pretty_printer(log_obj = loggersrv.error, to_exit = True,
put_text = "{reverse}{red}{bold}HWID '%s' is invalid. Hex string is too long. Exiting...{end}" %hexsub.upper())
else:
srv_config['hwid'] = binascii.a2b_hex(hexsub)
# Check LCID.
srv_config['lcid'] = check_lcid(srv_config['lcid'], loggersrv.warning)
# Check sqlite.
if srv_config['sqlite']:
if srv_config['sqlite'] is True: # Resolve bool to the default path
srv_config['sqlite'] = srv_options['sql']['file']
if os.path.isdir(srv_config['sqlite']):
pretty_printer(log_obj = loggersrv.warning,
put_text = "{reverse}{yellow}{bold}You specified a folder instead of a database file! This behavior is not officially supported anymore, please change your start parameters soon.{end}")
srv_config['sqlite'] = os.path.join(srv_config['sqlite'], 'pykms_database.db')
try:
import sqlite3
sql_initialize(srv_config['sqlite'])
except ImportError:
pretty_printer(log_obj = loggersrv.warning,
put_text = "{reverse}{yellow}{bold}Module 'sqlite3' not installed, database support disabled.{end}")
srv_config['sqlite'] = False
# Check other specific server options.
opts = [('clientcount', '-c/--client-count'),
('timeoutidle', '-t0/--timeout-idle'),
('timeoutsndrcv', '-t1/--timeout-sndrcv')]
check_other(srv_config, opts, loggersrv, where = 'srv')
# Check further addresses / ports.
if 'listen' in srv_config:
addresses = []
for elem in srv_config['listen']:
try:
addr, port = elem.split(',')
except ValueError:
pretty_printer(log_obj = loggersrv.error, to_exit = True,
put_text = "{reverse}{red}{bold}argument `-n/--listen`: %s not well defined. Exiting...{end}" %elem)
try:
port = int(port)
except ValueError:
pretty_printer(log_obj = loggersrv.error, to_exit = True,
put_text = "{reverse}{red}{bold}argument `-n/--listen`: port number '%s' is invalid. Exiting...{end}" %port)
if not (1 <= port <= 65535):
pretty_printer(log_obj = loggersrv.error, to_exit = True,
put_text = "{reverse}{red}{bold}argument `-n/--listen`: port number '%s' is invalid. Enter between 1 - 65535. Exiting...{end}" %port)
addresses.append((addr, port))
srv_config['listen'] = addresses
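# --- Illustrative sketch, not py-kms code: the HWID rules enforced by server_check() above. ---
# An optional leading "0x" is stripped, then the value must be exactly 16 hexadecimal digits
# (odd-length, short, long or non-hex strings are rejected); "RANDOM" is replaced beforehand
# with uuid.uuid4().hex[:16]. The same checks condensed into a single helper:
def _normalize_hwid_example(hwid):
    hexstr = hwid[2:] if hwid.startswith("0x") else hwid
    if re.fullmatch(r"[0-9a-fA-F]{16}", hexstr) is None:
        raise ValueError("HWID must be exactly 16 hexadecimal digits, got %r" % hwid)
    return binascii.a2b_hex(hexstr)                        # 8 raw bytes, as stored in srv_config['hwid']
# _normalize_hwid_example("364F463A8863D35F") -> b'6OF:\x88c\xd3_'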
def server_create():
# Create address list (when the current user indicates execution inside the Windows Sandbox,
then we won't allow port reuse - it is not supported).
all_address = [(
srv_config['ip'], srv_config['port'],
(srv_config['backlog_main'] if 'backlog_main' in srv_config else srv_options['backlog']['def']),
(srv_config['reuse_main'] if 'reuse_main' in srv_config else srv_options['reuse']['def'])
)]
log_address = "TCP server listening at %s on port %d" %(srv_config['ip'], srv_config['port'])
if 'listen' in srv_config:
for l, b, r in zip(srv_config['listen'], srv_config['backlog'], srv_config['reuse']):
all_address.append(l + (b,) + (r,))
log_address += justify("at %s on port %d" %(l[0], l[1]), indent = 56)
server = KeyServer(all_address, kmsServerHandler, want_dual = (srv_config['dual'] if 'dual' in srv_config else srv_options['dual']['def']))
server.timeout = srv_config['timeoutidle']
loggersrv.info(log_address)
loggersrv.info("HWID: %s" % deco(binascii.b2a_hex(srv_config['hwid']), 'utf-8').upper())
return server
def server_terminate(generic_srv, exit_server = False, exit_thread = False):
if exit_server:
generic_srv.terminate_serve()
if exit_thread:
generic_srv.terminate_thread()
class ServerWithoutGui(object):
def start(self):
import queue as Queue
daemon_queue = Queue.Queue(maxsize = 0)
daemon_serverthread = server_thread(daemon_queue, name = "Thread-Srv-Daemon")
daemon_serverthread.setDaemon(True)
# options already checked in `server_main_terminal`.
daemon_serverthread.checked = True
daemon_serverthread.start()
daemon_queue.put('start')
return 0, daemon_serverthread
def join(self, daemon_serverthread):
while daemon_serverthread.is_alive():
daemon_serverthread.join(timeout = 0.5)
def clean(self, daemon_serverthread):
server_terminate(daemon_serverthread, exit_server = True, exit_thread = True)
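# --- Illustrative sketch, not py-kms code: driving the headless server via ServerWithoutGui. ---
# Assumes server_options() and server_check() have already populated srv_config (as
# server_main_terminal() does below); start() hands the 'start' token to a daemon thread,
# join() blocks until it dies, clean() shuts the server and the thread down.
def _server_without_gui_example():
    srv = ServerWithoutGui()
    rc, thread = srv.start()
    try:
        srv.join(thread)
    finally:
        srv.clean(thread)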
def server_main_terminal():
# Parse options.
server_options()
# Check options.
server_check()
serverthread.checked = True
# Run threaded server.
serverqueue.put('start')
# Wait to finish.
try:
while serverthread.is_alive():
serverthread.join(timeout = 0.5)
except (KeyboardInterrupt, SystemExit):
server_terminate(serverthread, exit_server = True, exit_thread = True)
class kmsServerHandler(socketserver.BaseRequestHandler):
def setup(self):
loggersrv.info("Connection accepted: %s:%d" %(self.client_address[0], self.client_address[1]))
srv_config['raddr'] = str(self.client_address[0])
def handle(self):
self.request.settimeout(srv_config['timeoutsndrcv'])
while True:
# self.request is the TCP socket connected to the client
try:
self.data = self.request.recv(1024)
if self.data == '' or not self.data:
pretty_printer(log_obj = loggersrv.debug, # use debug, as the healthcheck will spam this
put_text = "{reverse}{yellow}{bold}No data received.{end}")
break
except socket.error as e:
pretty_printer(log_obj = loggersrv.error,
put_text = "{reverse}{red}{bold}While receiving: %s{end}" %str(e))
break
packetType = MSRPCHeader(self.data)['type']
if packetType == rpcBase.packetType['bindReq']:
loggersrv.info("RPC bind request received.")
pretty_printer(num_text = [-2, 2], where = "srv")
handler = pykms_RpcBind.handler(self.data, srv_config)
elif packetType == rpcBase.packetType['request']:
loggersrv.info("Received activation request.")
pretty_printer(num_text = [-2, 13], where = "srv")
handler = pykms_RpcRequest.handler(self.data, srv_config)
else:
pretty_printer(log_obj = loggersrv.error,
put_text = "{reverse}{red}{bold}Invalid RPC request type %s.{end}" %packetType)
break
res = enco(str(handler.populate()), 'latin-1')
if packetType == rpcBase.packetType['bindReq']:
loggersrv.info("RPC bind acknowledged.")
pretty_printer(num_text = [-3, 5, 6], where = "srv")
elif packetType == rpcBase.packetType['request']:
loggersrv.info("Responded to activation request.")
pretty_printer(num_text = [-3, 18, 19], where = "srv")
try:
self.request.send(res)
if packetType == rpcBase.packetType['request']:
break
except socket.error as e:
pretty_printer(log_obj = loggersrv.error,
put_text = "{reverse}{red}{bold}While sending: %s{end}" %str(e))
break
def finish(self):
self.request.close()
loggersrv.info("Connection closed: %s:%d" %(self.client_address[0], self.client_address[1]))
serverqueue = Queue.Queue(maxsize = 0)
serverthread = server_thread(serverqueue, name = "Thread-Srv")
serverthread.daemon = True
serverthread.start()
if __name__ == "__main__":
server_main_terminal()


@@ -1,142 +0,0 @@
#!/usr/bin/env python3
import datetime
import os
import logging
# sqlite3 is optional.
try:
import sqlite3
except ImportError:
pass
from pykms_Format import pretty_printer
#--------------------------------------------------------------------------------------------------------------------------------------------------------
loggersrv = logging.getLogger('logsrv')
def sql_initialize(dbName):
if not os.path.isfile(dbName):
# Initialize the database.
loggersrv.debug(f'Initializing database file "{dbName}"...')
con = None
try:
con = sqlite3.connect(dbName)
cur = con.cursor()
cur.execute("CREATE TABLE clients(clientMachineId TEXT , machineName TEXT, applicationId TEXT, skuId TEXT, licenseStatus TEXT, lastRequestTime INTEGER, kmsEpid TEXT, requestCount INTEGER, machineIp TEXT, PRIMARY KEY(clientMachineId, applicationId))")
except sqlite3.Error as e:
pretty_printer(log_obj = loggersrv.error, to_exit = True, put_text = "{reverse}{red}{bold}Sqlite Error: %s. Exiting...{end}" %str(e))
finally:
if con:
con.commit()
con.close()
else:
# Update the database.
loggersrv.debug(f'Updating database file "{dbName}"...')
con = None
try:
con = sqlite3.connect(dbName)
cur = con.cursor()
cur.execute("ALTER TABLE clients ADD COLUMN machineIp TEXT")
except sqlite3.Error as e:
pretty_printer(log_obj = loggersrv.debug, to_exit = False, put_text = "{reverse}Sqlite Error: %s.{end}" %str(e))
finally:
if con:
con.commit()
con.close()
def sql_get_all(dbName):
if not os.path.isfile(dbName):
return None
with sqlite3.connect(dbName) as con:
cur = con.cursor()
cur.execute("SELECT * FROM clients")
clients = []
for row in cur.fetchall():
clients.append({
'clientMachineId': row[0],
'machineName': row[1],
'applicationId': row[2],
'skuId': row[3],
'licenseStatus': row[4],
'lastRequestTime': datetime.datetime.fromtimestamp(row[5]).isoformat(),
'kmsEpid': row[6],
'requestCount': row[7],
'machineIp': row[8],
})
return clients
def sql_update(dbName, infoDict):
con = None
try:
con = sqlite3.connect(dbName)
cur = con.cursor()
cur.execute("SELECT * FROM clients WHERE clientMachineId=:clientMachineId AND applicationId=:appId;", infoDict)
try:
data = cur.fetchone()
if not data:
# Insert row.
cur.execute("INSERT INTO clients (clientMachineId, machineName, applicationId, \
skuId, licenseStatus, lastRequestTime, requestCount, machineIp) VALUES (:clientMachineId, :machineName, :appId, :skuId, :licenseStatus, :requestTime, 1, :machineIp);", infoDict)
else:
# Update data.
if data[1] != infoDict["machineName"]:
cur.execute("UPDATE clients SET machineName=:machineName WHERE \
clientMachineId=:clientMachineId AND applicationId=:appId;", infoDict)
if data[2] != infoDict["appId"]:
cur.execute("UPDATE clients SET applicationId=:appId WHERE \
clientMachineId=:clientMachineId AND applicationId=:appId;", infoDict)
if data[3] != infoDict["skuId"]:
cur.execute("UPDATE clients SET skuId=:skuId WHERE \
clientMachineId=:clientMachineId AND applicationId=:appId;", infoDict)
if data[4] != infoDict["licenseStatus"]:
cur.execute("UPDATE clients SET licenseStatus=:licenseStatus WHERE \
clientMachineId=:clientMachineId AND applicationId=:appId;", infoDict)
if data[5] != infoDict["requestTime"]:
cur.execute("UPDATE clients SET lastRequestTime=:requestTime WHERE \
clientMachineId=:clientMachineId AND applicationId=:appId;", infoDict)
if data[8] != infoDict["machineIp"]:
cur.execute("UPDATE clients SET machineIp=:machineIp WHERE \
clientMachineId=:clientMachineId AND applicationId=:appId;", infoDict)
# Increment requestCount
cur.execute("UPDATE clients SET requestCount=requestCount+1 WHERE \
clientMachineId=:clientMachineId AND applicationId=:appId;", infoDict)
except sqlite3.Error as e:
pretty_printer(log_obj = loggersrv.error, to_exit = True,
put_text = "{reverse}{red}{bold}Sqlite Error: %s. Exiting...{end}" %str(e))
except sqlite3.Error as e:
pretty_printer(log_obj = loggersrv.error, to_exit = True,
put_text = "{reverse}{red}{bold}Sqlite Error: %s. Exiting...{end}" %str(e))
finally:
if con:
con.commit()
con.close()
def sql_update_epid(dbName, kmsRequest, response, appName):
cmid = str(kmsRequest['clientMachineId'].get())
con = None
try:
con = sqlite3.connect(dbName)
cur = con.cursor()
cur.execute("SELECT * FROM clients WHERE clientMachineId=? AND applicationId=?;", (cmid, appName))
try:
data = cur.fetchone()
cur.execute("UPDATE clients SET kmsEpid=? WHERE \
clientMachineId=? AND applicationId=?;", (str(response["kmsEpid"].decode('utf-16le')), cmid, appName))
except sqlite3.Error as e:
pretty_printer(log_obj = loggersrv.error, to_exit = True,
put_text = "{reverse}{red}{bold}Sqlite Error: %s. Exiting...{end}" %str(e))
except sqlite3.Error as e:
pretty_printer(log_obj = loggersrv.error, to_exit = True,
put_text = "{reverse}{red}{bold}Sqlite Error: %s. Exiting...{end}" %str(e))
finally:
if con:
con.commit()
con.close()
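# --- Illustrative sketch, not py-kms code: exercising the helpers above with an invented record. ---
# sql_initialize() creates or migrates the clients table, sql_update() upserts one row per
# (clientMachineId, applicationId) pair, and sql_update_epid() back-fills the ePID once a
# response has been built. The dictionary keys below are the named parameters used in the
# SQL statements above; every value is made up for illustration.
def _sql_usage_example(db_path = "./pykms_database.db"):
    import time, uuid
    sql_initialize(db_path)
    sql_update(db_path, {
        "clientMachineId" : str(uuid.uuid4()),
        "machineName"     : "example-host",
        "appId"           : str(uuid.uuid4()),             # application GUID (placeholder)
        "skuId"           : str(uuid.uuid4()),
        "licenseStatus"   : "licensed",
        "requestTime"     : int(time.time()),              # stored as a Unix timestamp (lastRequestTime)
        "machineIp"       : "192.0.2.10",
    })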


@@ -12,7 +12,6 @@
0.0.0.0 \
1688 \
-l ${KMS_LOCALE} \
-c ${KMS_CLIENTCOUNT} \
-a ${KMS_ACTIVATIONINTERVAL} \
-r ${KMS_RENEWALINTERVAL} \
-s /kms/var/kms.db \