Multiple fixes

* Implemented ScriptRunner with environment expansion and cleaner
  code.
* Added ApiClient plugin (com.noccy.apiclient)
* Renamed CHANGELOG.md to VERSIONS.md
* Shuffled buildtools
* Added first unittests
This commit is contained in:
Chris 2021-12-11 01:44:01 +01:00
parent 8c6f7c1e93
commit 8cc1eac7a4
33 changed files with 1976 additions and 891 deletions

44
.spark/build/package.sh Executable file
View File

@ -0,0 +1,44 @@
#!/bin/bash
VERSION="$(git describe --tags)"
PATH="$PWD/tools:$PATH"
if [ -z "$VERSION" ]; then
echo "Could not parse version from git"
exit 1
fi
if [ -z "$DESTINATION" ]; then
DESTINATION="release/$VERSION"
fi
echo " DESTINATION=$DESTINATION"
echo "* Preparing release direcory"
mkdir -p $DESTINATION
echo "* Building phar archive"
pharlite &>/dev/null
echo "* Copying files to release directory"
cp spark.phar $DESTINATION/spark.phar
cp README.md $DESTINATION/README.md
cp VERSIONS.md $DESTINATION/VERSIONS.md
echo "* Building dist and source archives"
7z a -tzip "$DESTINATION/spark-$VERSION-dist.zip" spark.phar plugins README.md VERSIONS.md >/dev/null
tar cfz "$DESTINATION/spark-$VERSION-dist.tgz" spark.phar plugins README.md VERSIONS.md
7z a -tzip "$DESTINATION/spark-$VERSION-src.zip" bin src runtime plugins composer.json README.md VERSIONS.md >/dev/null
tar cfz "$DESTINATION/spark-$VERSION-src.tgz" bin src runtime plugins composer.json README.md VERSIONS.md
echo "* Creating makeself installer"
test -d release/tmp && rm -rf release/tmp
mkdir release/tmp
cp -R spark.phar plugins README.md VERSIONS.md release/tmp/
pushd release/tmp &>/dev/null
makeself . ../../$DESTINATION/spark-$VERSION-dist.run "Spark $VERSION" ./spark.phar install &>/dev/null
popd &>/dev/null
rm -rf release/tmp
echo -e " ┌─[$DESTINATION]"
ls -hl --color=auto $DESTINATION | sed "s/^/ │ /"

View File

@ -8,5 +8,5 @@ if [ -z "$VERSION" ]; then
fi
echo -e "<?php define(\"APP_VERSION\", \"$VERSION\");" > src/version
echo "[Version: $VERSION]"
echo -e "* Version: \e[1m$VERSION\e[0m"

View File

@ -1,33 +0,0 @@
#!/bin/bash
VERSION="$(git describe --tags)"
PATH="$PWD/tools:$PATH"
if [ -z "$VERSION" ]; then
echo "Could not parse version from git"
exit 1
fi
mkdir -p release/$VERSION
pharlite
cp spark.phar release/$VERSION/spark.phar
cp README.md release/$VERSION/README.md
cp CHANGELOG.md release/$VERSION/CHANGELOG.md
7z a -tzip "release/$VERSION/spark-$VERSION-dist.zip" spark.phar plugins README.md CHANGELOG.md
tar cvfz "release/$VERSION/spark-$VERSION-dist.tgz" spark.phar plugins README.md CHANGELOG.md
7z a -tzip "release/$VERSION/spark-$VERSION-src.zip" bin src runtime plugins composer.json README.md CHANGELOG.md
tar cvfz "release/$VERSION/spark-$VERSION-src.tgz" bin src runtime plugins composer.json README.md CHANGELOG.md
test -d release/tmp && rm -rf release/tmp
mkdir release/tmp
cp -R spark.phar plugins README.md CHANGELOG.md release/tmp/
pushd release/tmp
makeself . ../$VERSION/spark-$VERSION-dist.run "Spark $VERSION" ./spark.phar install
popd
rm -rf release/tmp
echo -e "\e[1m[release/$VERSION]\e[0m"
ls -hl release/$VERSION

View File

@ -1,11 +1,18 @@
{
"preload": [
".spark/plugins/*"
".spark/plugins/*",
".spark/local/*"
],
"scripts": {
"version": [
".spark/build/update-version.sh"
],
"package": [
".spark/build/package.sh"
],
"build": [
".spark/update-version.sh",
".spark/package.sh"
"@version",
"@package"
]
}
}

View File

@ -81,7 +81,7 @@ The advantage of writing your extensions as plugins:
Using scripts is the simplest way to leverage Spark:
*spark.json*
```
```json
{
...
"scripts": {
@ -95,14 +95,16 @@ Using scripts is the simplest way to leverage Spark:
}
```
`.php`-files are executed in-process, and as such have access to any registered
resources, resource types and plugins.
*Note: The script system need to be improved and revamped to support environment variables and such*
- Call on other scripts by prepending `@` to the script name.
- `.php`-files are executed in-process, and as such have access to any registered
resources, resource types and plugins.
- `.phar` files are still executed out-of-process, as are any commands that don't
match a PHP callable or any other specific rule.
- Substitute shell variables using `${varname}`.
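Both features can be combined in the `scripts` section of *spark.json*; a minimal
sketch (script names and paths here are only illustrative):
```json
{
    "scripts": {
        "clean": [ "rm -rf build" ],
        "build": [
            "@clean",
            "php bin/build.php --out ${BUILD_DIR}"
        ]
    }
}
```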
### Resources
Resources are wrappers around database connections and such, providing a cleaner
interface to their internals.
Resources are generally registered by plugins.
Resources are generally registered by plugins or local scripts.

View File

@ -1,4 +1,4 @@
# Changelog
# Versions
## 0.1.0
@ -8,3 +8,6 @@
- Spark will automatically chdir to the project root when loading the environment.
This makes sure scripts and plugins etc all start at a known location.
- Added makeself and pharlite in `tools`.
- Rewritten script runner with proper variable substitution.
- Added utility libraries for HTTP requests (Guzzle), templating (Twig) and dotenv
  support (symfony/dotenv, activated when the environment loads).

View File

@ -36,6 +36,9 @@
"symfony/process": "^6.0",
"psr/log": "^3.0",
"symfony/var-dumper": "^6.0",
"symfony/yaml": "^6.0"
"symfony/yaml": "^6.0",
"guzzlehttp/guzzle": "^7.4",
"twig/twig": "^3.3",
"symfony/dotenv": "^6.0"
}
}

26
phpunit.xml Normal file
View File

@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<phpunit xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:noNamespaceSchemaLocation="https://schema.phpunit.de/9.5/phpunit.xsd"
bootstrap="vendor/autoload.php"
cacheResultFile=".phpunit.cache/test-results"
executionOrder="depends,defects"
forceCoversAnnotation="true"
beStrictAboutCoversAnnotation="true"
beStrictAboutOutputDuringTests="true"
beStrictAboutTodoAnnotatedTests="true"
failOnRisky="true"
failOnWarning="true"
verbose="true">
<testsuites>
<testsuite name="default">
<directory suffix="Test.php">tests</directory>
</testsuite>
</testsuites>
<coverage cacheDirectory=".phpunit.cache/code-coverage"
processUncoveredFiles="true">
<include>
<directory suffix=".php">src</directory>
</include>
</coverage>
</phpunit>

View File

@ -0,0 +1,84 @@
<?php // "name":"Call on web APIs", "author":"Noccy"
namespace SparkPlug\Com\Noccy\ApiClient\Api;
use JsonSerializable;
class Catalog implements JsonSerializable
{
private array $properties = [];
private array $methods = [];
private ?string $name;
private ?string $info;
public function __construct(array $catalog=[])
{
$catalog = $catalog['catalog']??[];
$this->name = $catalog['name']??null;
$this->info = $catalog['info']??null;
foreach ($catalog['props']??[] as $k=>$v) {
$this->properties[$k] = $v;
}
foreach ($catalog['methods']??[] as $k=>$v) {
$this->methods[$k] = new Method($v);
}
}
public static function createFromFile(string $filename): Catalog
{
$json = file_get_contents($filename);
$catalog = json_decode($json, true);
$catalog['catalog']['name'] = basename($filename, ".json");
return new Catalog($catalog);
}
public function getName(): ?string
{
return $this->name;
}
public function getInfo(): ?string
{
return $this->info;
}
public function getProperties(): array
{
return $this->properties;
}
public function applyProperties(array $props)
{
$this->properties = array_merge($this->properties, $props);
}
public function addMethod(string $name, Method $method)
{
$this->methods[$name] = $method;
}
public function getMethod(string $method): ?Method
{
return $this->methods[$method]??null;
}
public function getMethods(): array
{
return $this->methods;
}
public function jsonSerialize(): mixed
{
return [
'catalog' => [
'name' => $this->name,
'info' => $this->info,
'props' => $this->properties,
'methods' => $this->methods,
]
];
}
}

View File

@ -0,0 +1,36 @@
<?php // "name":"Call on web APIs", "author":"Noccy"
namespace SparkPlug\Com\Noccy\ApiClient\Api;
use JsonSerializable;
class Method implements JsonSerializable
{
private array $properties = [];
private ?string $info;
public function __construct(array $method)
{
$this->properties = $method['props']??[];
$this->info = $method['info']??null;
}
public function getProperties(): array
{
return $this->properties;
}
public function getInfo(): ?string
{
return $this->info;
}
public function jsonSerialize(): mixed
{
return [
'info' => $this->info,
'props' => $this->properties,
];
}
}

View File

@ -0,0 +1,13 @@
<?php // "name":"Call on web APIs", "author":"Noccy"
namespace SparkPlug\Com\Noccy\ApiClient\Api;
class Profile
{
private array $properties = [];
public function getProperties(): array
{
return $this->properties;
}
}

View File

@ -0,0 +1,91 @@
<?php
namespace SparkPlug\Com\Noccy\ApiClient\Commands;
use Spark\Commands\Command;
use SparkPlug;
use SparkPlug\Com\Noccy\ApiClient\Api\Catalog;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Output\OutputInterface;
class ApiCatalogCommand extends Command
{
protected function configure()
{
$this->setName("api:catalog")
->setDescription("Manage the API catalogs")
->addOption("create", "c", InputOption::VALUE_REQUIRED, "Create a new catalog")
->addOption("remove", "r", InputOption::VALUE_REQUIRED, "Remove a catalog")
->addOption("set-props", null, InputOption::VALUE_REQUIRED, "Apply properties to a catalog")
->addArgument("properties", InputArgument::IS_ARRAY, "Default properties for the catalog")
->addOption("list", null, InputOption::VALUE_NONE, "Only list catalogs, not methods")
;
}
protected function execute(InputInterface $input, OutputInterface $output)
{
$api = get_plugin('com.noccy.apiclient');
$list = $input->getOption("list");
$dest = get_environment()->getConfigDirectory() . "/api/catalogs";
if ($create = $input->getOption("create")) {
if (file_exists($dest."/".$create.".json")) {
$output->writeln("<error>Catalog {$create} already exists!</>");
return Command::FAILURE;
}
$catalog = new Catalog([
'catalog' => [
'name' => $create
]
]);
file_put_contents($dest."/".$create.".json", json_encode($catalog, JSON_PRETTY_PRINT|JSON_UNESCAPED_SLASHES));
$output->writeln("<info>Created new catalog {$create}</>");
return Command::SUCCESS;
}
if ($remove = $input->getOption("remove")) {
if (!file_exists($dest."/".$remove.".json")) {
$output->writeln("<error>Catalog {$remove} does not exist!</>");
return Command::FAILURE;
}
unlink($dest."/".$remove.".json");
$output->writeln("<info>Removed catalog {$remove}</>");
return Command::SUCCESS;
}
if ($setprops = $input->getOption("set-props")) {
$proparr = [];
$props = $input->getArgument("properties");
foreach ($props as $str) {
if (!str_contains($str,"=")) {
$output->writeln("<error>Ignoring parameter argument '{$str}'</>");
} else {
[$k,$v] = explode("=",$str,2);
$proparr[$k] = $v;
}
}
$catalog = $api->getCatalog($setprops);
$catalog->applyProperties($proparr);
$api->saveCatalog($catalog);
$output->writeln("<info>Updated properties on catalog {$setprops}</>");
return Command::SUCCESS;
}
$catalogs = $api->getCatalogNames();
foreach ($catalogs as $catalog) {
$c = $api->getCatalog($catalog);
if ($list) {
$output->writeln($catalog);
} else {
$output->writeln("\u{25e9} <options=bold>{$catalog}</>: <fg=gray>{$c->getInfo()}</>");
$ms = $c->getMethods();
foreach ($ms as $name=>$m) {
$last = ($m === end($ms));
$output->writeln(($last?"\u{2514}\u{2500}":"\u{251c}\u{2500}")."\u{25a2} {$catalog}.{$name}: <info>{$m->getInfo()}</>");
}
}
}
return Command::SUCCESS;
}
}

View File

@ -0,0 +1,35 @@
<?php
namespace SparkPlug\Com\Noccy\ApiClient\Commands;
use Spark\Commands\Command;
use SparkPlug;
use SparkPlug\Com\Noccy\ApiClient\Api\Method;
use SparkPlug\Com\Noccy\ApiClient\ApiClientPlugin;
use SparkPlug\Com\Noccy\ApiClient\Request\RequestBuilder;
use Symfony\Component\Console\Helper\Table;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Output\OutputInterface;
class ApiLogsCommand extends Command
{
protected function configure()
{
$this->setName("api:logs")
->setDescription("Show previous requests and manage the log")
->addOption("clear", null, InputOption::VALUE_NONE, "Clear the log")
->addOption("write", "w", InputOption::VALUE_REQUIRED, "Write the formatted entries to a file")
;
}
protected function execute(InputInterface $input, OutputInterface $output)
{
/** @var ApiClientPlugin */
$plugin = get_plugin('com.noccy.apiclient');
return Command::SUCCESS;
}
}

View File

@ -0,0 +1,22 @@
<?php
namespace SparkPlug\Com\Noccy\ApiClient\Commands;
use Spark\Commands\Command;
use SparkPlug;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
class ApiProfileCommand extends Command
{
protected function configure()
{
$this->setName("api:profile")
->setDescription("Manage API profiles");
}
protected function execute(InputInterface $input, OutputInterface $output)
{
return Command::SUCCESS;
}
}

View File

@ -0,0 +1,134 @@
<?php
namespace SparkPlug\Com\Noccy\ApiClient\Commands;
use Spark\Commands\Command;
use SparkPlug;
use SparkPlug\Com\Noccy\ApiClient\Api\Method;
use SparkPlug\Com\Noccy\ApiClient\ApiClientPlugin;
use SparkPlug\Com\Noccy\ApiClient\Request\RequestBuilder;
use Symfony\Component\Console\Helper\Table;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Output\OutputInterface;
class ApiRequestCommand extends Command
{
protected function configure()
{
$this->setName("api:request")
->setDescription("Send a request")
->addOption("profile", "p", InputOption::VALUE_REQUIRED, "Use profile for request")
->addOption("save", "s", InputOption::VALUE_NONE, "Save to catalog")
->addArgument("method", InputArgument::OPTIONAL, "Request URL or catalog.method")
->addArgument("props", InputArgument::IS_ARRAY, "Parameter key=value pairs")
;
}
protected function execute(InputInterface $input, OutputInterface $output)
{
/** @var ApiClientPlugin */
$plugin = get_plugin('com.noccy.apiclient');
$separator = str_repeat("\u{2500}", 40);
$method = $input->getArgument("method");
$builder = new RequestBuilder();
if (str_contains($method, "://")) {
$builder->setProperties([
'url' => $method
]);
} else {
if (str_contains($method, '.')) {
[$catalog,$method] = explode(".", $method, 2);
$catalogObj = $plugin->getCatalog($catalog);
// if (!$catalogObj) {
// $output->writeln("<error>No such catalog {$catalog}</>");
// return Command::FAILURE;
// }
$methodObj = $catalogObj->getMethod($method);
// if (!$methodObj) {
// $output->writeln("<error>No such method {$method} in catalog {$catalog}</>");
// return Command::FAILURE;
// }
$builder->setCatalog($catalogObj);
$builder->setMethod($methodObj);
}
}
$props = [];
$propstr = $input->getArgument("props");
foreach ($propstr as $str) {
if (!str_contains($str,"=")) {
$output->writeln("<error>Ignoring parameter argument '{$str}'</>");
} else {
[$k,$v] = explode("=",$str,2);
$props[$k] = $v;
}
}
$builder->addProperties($props);
if ($input->getOption("save")) {
$catalogObj = $plugin->getCatalog($catalog);
$methodObj = new Method([
'name' => $method,
'info' => $props['method.info']??null,
'props' => $props
]);
$catalogObj->addMethod($method, $methodObj);
$plugin->saveCatalog($catalogObj);
$output->writeln("<info>Saved method {$method} to catalog {$catalog}</>");
return self::SUCCESS;
}
if ($profile = $input->getOption("profile")) {
$profileObj = $plugin->getProfile($profile);
$builder->setProfile($profileObj);
}
$request = $builder->getRequest();
$table = new Table($output);
$table->setStyle('compact');
$table->setHeaders([ "Request Info", "" ]);
foreach ($request->getInfo() as $i=>$v) {
$table->addRow([$i,$v]);
}
$table->render();
$table = new Table($output);
$table->setStyle('compact');
$table->setHeaders([ "Request Headers", "" ]);
foreach ($request->getHeaders() as $i=>$v) {
$table->addRow([$i,join("\n",$v)]);
}
$table->render();
$output->writeln($separator);
$response = $request->send();
$rheaders = $response->getHeaders();
$table = new Table($output);
$table->setStyle('compact');
$table->setHeaders([ "Response headers", "" ]);
foreach ($rheaders as $h=>$v) {
$table->addRow([$h,join("\n",$v)]);
}
$table->render();
$body = (string)$response->getBody();
$output->writeln($separator);
$parseAs = $builder->getCalculatedProperty('response.parse');
if ($parseAs == 'json') {
dump(json_decode($body));
} else {
$output->writeln($body);
}
$output->writeln($separator);
$output->writeln(strlen($body)." bytes");
return Command::SUCCESS;
}
}

View File

@ -0,0 +1,73 @@
# ApiClient for Spark
## Installation
To install, download and extract the plugin directory into your plugin directory.
## Usage
*Note: Profiles are not yet implemented*
You should make a catalog and a profile first. You don't have to, but this will
save you some time.
$ spark api:catalog --create mysite \
protocol=http \
urlbase=http://127.0.0.1:80/api/
$ spark api:profile --create apiuser \
--catalog mysite \
auth.username=apiuser \
auth.token=APITOKEN
You can now add some requests:
$ spark api:request --add mysite.info \
url=v1/info \
http.method=POST \
response.parse=json
And send them:
$ spark api:request -p apiuser mysite.info
## Internals
ApiClient works on a map of properties, populated with the defaults from the
catalog. The request properties are then applied, followed by the profile
properties.
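As a rough sketch of how this plays out with the `mysite` examples above, the
effective property map for `spark api:request -p apiuser mysite.info` would be the
union of the catalog, method and profile properties (later sources override earlier
ones):
```
protocol=http
urlbase=http://127.0.0.1:80/api/
url=v1/info
http.method=POST
response.parse=json
auth.username=apiuser
auth.token=APITOKEN
```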
### Properties
```
# Core properties
protocol={"http"|"websocket"|"xmlrpc"|"jsonrpc"}
# Final URL is [urlbase+]url
urlbase={url}
url={url}
# Authentication options
auth.username={username}
auth.password={password}
auth.token={token}
auth.type={"basic"|"bearer"}
# HTTP options
http.method={"GET"|"POST"|...}
http.version={"1.0"|"1.1"|"2.0"}
http.header.{name}={value}
http.query.{field}={value}
http.body={raw-body}
http.body.json={object}
# RPC options
rpc.method={string}
rpc.argument.{index}={value}
# Request handling
request.follow-redirecs={"auto"|"no"|"yes"}
request.max-redirects={number}
# Response handling
response.parse={"none"|"json"|"yaml"|"xml"}
response.good="200,201,202,203,204,205,206,207,208"
```
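When a method is saved to a catalog, the catalog is written as a JSON file under the
environment's config directory (e.g. `.spark/api/catalogs/mysite.json`). Based on the
Catalog and Method serializers it looks roughly like this sketch:
```json
{
    "catalog": {
        "name": "mysite",
        "info": null,
        "props": {
            "protocol": "http",
            "urlbase": "http://127.0.0.1:80/api/"
        },
        "methods": {
            "info": {
                "info": null,
                "props": {
                    "url": "v1/info",
                    "http.method": "POST",
                    "response.parse": "json"
                }
            }
        }
    }
}
```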

View File

@ -0,0 +1,91 @@
<?php
namespace SparkPlug\Com\Noccy\ApiClient\Request;
use GuzzleHttp\Client;
use GuzzleHttp\Psr7\Response;
class HttpRequest extends Request
{
private string $method = 'GET';
private string $version = '1.1';
private ?string $url = null;
private array $query = [];
private array $headers = [];
public function __construct(array $props)
{
foreach ($props as $prop=>$value) {
if (str_starts_with($prop, 'http.')) {
$this->handleHttpProp(substr($prop,5), $value);
} elseif ($prop == 'url') {
$this->url = $value;
}
}
}
private function handleHttpProp(string $prop, $value)
{
if (str_starts_with($prop, 'query.')) {
$this->query[substr($prop, 6)] = $value;
} elseif (str_starts_with($prop, 'header.')) {
$this->headers[substr($prop, 7)] = $value;
} elseif ($prop === 'method') {
$this->method = strtoupper($value);
} elseif ($prop === 'version') {
$this->version = $value;
} else {
fprintf(STDERR, "Warning: unhandled prop: http.%s (%s)\n", $prop, $value);
}
}
public function getInfo(): array
{
$query = http_build_query($this->query);
$headers = [];
foreach ($this->headers as $k=>$v) {
// Convert to Proper-Case unless UPPERCASE
if ($k !== strtoupper($k))
$k = ucwords($k, '-');
// Build the header
$headers[] = sprintf("<options=bold>%s</>: %s", $k, $v);
}
return [
'protocol' => sprintf("HTTP/%s %s", $this->version, $this->method),
'query' => $this->url . "?" . $query,
'body' => "Empty body"
];
}
public function getHeaders(): array
{
$headers = [];
foreach ($this->headers as $k=>$v) {
// Convert to Proper-Case unless UPPERCASE
if ($k !== strtoupper($k))
$k = ucwords($k, '-');
// Build the header
$headers[$k] = (array)$v;
}
return $headers;
}
public function send(): ?Response
{
$query = http_build_query($this->query);
$url = $this->url . ($query?'?'.$query:'');
$config = [];
$client = new Client($config);
$options = [];
$response = $client->request($this->method, $url, $options);
return $response;
}
}

View File

@ -0,0 +1,26 @@
<?php
namespace SparkPlug\Com\Noccy\ApiClient\Request;
use GuzzleHttp\Psr7\Response;
class JsonRpcRequest extends Request
{
public function getInfo(): array
{
return [
];
}
public function send(): ?Response
{
return null;
}
public function getHeaders(): array
{
return [];
}
}

View File

@ -0,0 +1,16 @@
<?php
namespace SparkPlug\Com\Noccy\ApiClient\Request;
use GuzzleHttp\Psr7\Response;
abstract class Request
{
abstract public function send(): ?Response;
abstract public function getInfo(): array;
abstract public function getHeaders(): array;
}

View File

@ -0,0 +1,100 @@
<?php
namespace SparkPlug\Com\Noccy\ApiClient\Request;
use SparkPlug\Com\Noccy\ApiClient\Api\Catalog;
use SparkPlug\Com\Noccy\ApiClient\Api\Method;
use SparkPlug\Com\Noccy\ApiClient\Api\Profile;
class RequestBuilder
{
public static $Protocols = [
'http' => HttpRequest::class,
'websocket' => WebsocketRequest::class,
'jsonrpc' => JsonRpcRequest::class,
];
private ?Catalog $catalog = null;
private ?Method $method = null;
private ?Profile $profile = null;
private array $props = [];
public function setCatalog(?Catalog $catalog)
{
$this->catalog = $catalog;
return $this;
}
public function setMethod(?Method $method)
{
$this->method = $method;
return $this;
}
public function setProfile(?Profile $profile)
{
$this->profile = $profile;
return $this;
}
public function setProperties(array $properties)
{
$this->props = $properties;
}
public function addProperties(array $properties)
{
$this->props = array_merge(
$this->props,
$properties
);
}
private function buildProperties()
{
$props = [];
if ($this->catalog) {
$add = $this->catalog->getProperties();
$props = array_merge($props, $add);
}
if ($this->method) {
$add = $this->method->getProperties();
$props = array_merge($props, $add);
}
if ($this->profile) {
$add = $this->profile->getProperties();
$props = array_merge($props, $add);
}
$props = array_merge($props, $this->props);
$props = array_filter($props);
return $props;
}
public function getCalculatedProperty(string $name)
{
$props = $this->buildProperties();
return $props[$name] ?? null;
}
public function getRequest(): Request
{
$props = $this->buildProperties();
$protocol = $props['protocol']??'http';
if (!$handler = self::$Protocols[$protocol]??null) {
throw new \Exception("Invalid protocol for request: {$protocol}");
}
$base = $props['urlbase']??null;
$url = $props['url']??null;
if ($base) {
$props['url'] = $base . $url;
}
$request = new $handler($props);
return $request;
}
}

View File

@ -0,0 +1,26 @@
<?php
namespace SparkPlug\Com\Noccy\ApiClient\Request;
use GuzzleHttp\Psr7\Response;
class WebsocketRequest extends Request
{
public function getInfo(): array
{
return [
];
}
public function send(): ?Response
{
return null;
}
public function getHeaders(): array
{
return [];
}
}

View File

@ -0,0 +1,109 @@
<?php // "name":"Call on web APIs", "author":"Noccy"
namespace SparkPlug\Com\Noccy\ApiClient;
use SparkPlug;
class ApiClientPlugin extends SparkPlug
{
private array $catalogs = [];
private array $profiles = [];
public function load()
{
register_command(new Commands\ApiCatalogCommand());
register_command(new Commands\ApiRequestCommand());
register_command(new Commands\ApiProfileCommand());
register_command(new Commands\ApiLogsCommand());
}
private function loadCatalogs()
{
$env = get_environment();
$catalogDir = $env->getConfigDirectory() . "/api/catalogs";
if (file_exists($catalogDir)) {
$catalogFiles = glob($catalogDir."/*.json");
foreach ($catalogFiles as $catalogFile) {
$name = basename($catalogFile, ".json");
$this->catalogs[$name] = Api\Catalog::createFromFile($catalogFile);
}
}
}
private function loadProfiles()
{
$env = get_environment();
$catalogDir = $env->getConfigDirectory() . "/api/profiles";
if (file_exists($catalogDir)) {
$catalogFiles = glob($catalogDir."/*.json");
foreach ($catalogFiles as $catalogFile) {
}
}
}
public function createCatalog(string $name): ?Api\Catalog
{
return null;
}
public function saveCatalog(Api\Catalog $catalog)
{
$env = get_environment();
$catalogDir = $env->getConfigDirectory() . "/api/catalogs";
$catalogFile = $catalogDir . "/" . $catalog->getName() . ".json";
if (!is_dir($catalogDir)) {
mkdir($catalogDir, 0777, true);
}
file_put_contents($catalogFile."~", json_encode($catalog, JSON_PRETTY_PRINT|JSON_UNESCAPED_SLASHES));
rename($catalogFile."~", $catalogFile);
}
public function deleteCatalog(string $name)
{
}
public function getCatalog(string $name): ?Api\Catalog
{
if (empty($this->catalogs)) $this->loadCatalogs();
return $this->catalogs[$name]??null;
}
public function getCatalogNames(): array
{
if (empty($this->catalogs)) $this->loadCatalogs();
return array_keys($this->catalogs);
}
public function saveProfile(string $name, Api\Profile $profile)
{
}
public function deleteProfile(string $name)
{
}
public function getProfile(string $name): ?Api\Profile
{
if (empty($this->profiles)) $this->loadProfiles();
return null;
}
public function getProfileNames(): array
{
if (empty($this->profiles)) $this->loadProfiles();
return array_keys($this->profiles);
}
}
register_plugin("com.noccy.apiclient", new ApiClientPlugin);

View File

@ -65,7 +65,7 @@ class DockerDbExportCommand extends Command
$cmd = sprintf("mysqldump -u%s -p%s %s", $dbuser, $dbpass, $database);
break;
}
$this->exportFromService($service, $cmd, $output);
$this->exportFromService($service, $cmd, $output, $input->getOption("output"));
} elseif ($dsn) {
$url = parse_url($dsn);
if (empty($url)) {
@ -77,9 +77,13 @@ class DockerDbExportCommand extends Command
return Command::SUCCESS;
}
private function exportFromService(string $service, string $command, OutputInterface $output)
private function exportFromService(string $service, string $command, OutputInterface $output, ?string $outfile=null)
{
$cmd = sprintf("docker-compose exec -T %s %s", $service, $command);
if ($outfile) {
$cmd .= " > ".escapeshellarg($outfile);
}
$output->writeln(sprintf("→ <info>%s</>", $cmd));
passthru($cmd);
}
}

View File

@ -70,10 +70,10 @@ class PluginsCommand extends Command
$info = object();
}
$installed = in_array(basename($plugin), $localPluginList);
$badge = ($installed)?"<fg=green>\u{2714}</>":"<fg=gray>\u{27f3}</>";
$output->writeln(sprintf(" %s <fg=#0ff>%-20s</> %s", $badge, basename($plugin), $info->name??null));
$badge = ($installed)?"<fg=green>\u{2714}</>":"<fg=gray>\u{25cc}</>";
$output->writeln(sprintf(" %s <fg=%s>%-20s</> %s", $badge, ($installed?"#0ff":"#088"), basename($plugin), $info->name??null));
}
return Command::SUCCESS;
}
}
}

View File

@ -34,7 +34,9 @@ class RunCommand extends Command
}
return Command::SUCCESS;
} elseif ($script = $input->getArgument('script')) {
$env->runScript($script, $args, $input, $output);
$runner = $env->getScriptRunner();
//$env->runScript($script, $args, $input, $output);
$runner->evaluateDefinedScript($script); // args?
}
return Command::SUCCESS;

View File

@ -8,6 +8,7 @@ use Spark\SparkApplication;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
use Symfony\Component\Console\Style\SymfonyStyle;
use Symfony\Component\Dotenv\Dotenv;
class Environment
{
@ -36,12 +37,23 @@ class Environment
return array_keys($this->config['scripts']??[]);
}
public function getScriptRunner(): ScriptRunner
{
$runner = new ScriptRunner();
$runner->setDirectory($this->getProjectDirectory());
foreach ((array)$this->config['scripts'] as $name => $script) {
$runner->defineScript($name, $script);
}
return $runner;
}
/*
public function runScript(string $name, array $args, InputInterface $input, OutputInterface $output)
{
$script = $this->config['scripts'][$name]??$name;
if (is_string($script)) {
$this->execScript($script, $args);
$this->execScript($script, $args, $output);
} elseif (is_array($script)) {
foreach ($script as $row) {
$a = str_getcsv($row, ' ', "'");
@ -50,13 +62,13 @@ class Environment
$c = ($this->config['scripts'][substr($c,1)])??$c;
$this->runScript($c, $a, $input, $output);
} else {
$this->execScript($c, $a);
$this->execScript($c, $a, $output);
}
}
}
}
private function execScript(string $script, array $args)
private function execScript(string $script, array $args, OutputInterface $output)
{
// call script directly
if (str_ends_with($script, '.php')) {
@ -74,6 +86,7 @@ class Environment
passthru($script);
}
}
*/
public function loadEnvironment()
{
@ -84,9 +97,24 @@ class Environment
if (!array_key_exists('project_dir', $this->config)) {
return;
}
chdir($this->config['project_dir']);
$envfile = $this->config['project_dir']."/.env";
if (file_exists($envfile)) {
$dotenv = new Dotenv();
$dotenv->load($envfile);
}
$blacklistFile = $this->config['config_dir']."/blacklist";
if (file_exists($blacklistFile)) {
$blacklist = json_decode(file_get_contents($blacklistFile), true);
}
if (empty($this->config['preload'])) {
fprintf(STDERR, "Error: Missing or malformed spark.json file.\n");
exit(1);
}
// $this->logger->info("Loading environment...");
$preloads = [];
$root = $this->config['project_dir'];
@ -102,7 +130,7 @@ class Environment
if (!str_starts_with($item, "/")) {
$item = $this->getProjectDirectory() . "/" . $item;
}
if (is_file($item)) {
if (is_file($item) && fnmatch("*.php", $item)) {
// $this->logger->debug("Preloading file {$item}");
try {
include_once($item);
@ -119,8 +147,8 @@ class Environment
//$this->logger->error("Error preloading plugin {$item}: {$t->getMessage()} in {$t->getFile()} on line {$t->getLine()}");
}
} else {
fprintf(STDERR, "warning: Could not preload %s\n", $item);
//$this->logger->warning("Could not preload {$item}");
// fprintf(STDERR, "warning: Could not preload %s\n", $item);
// $this->logger->warning("Could not preload {$item}");
}
}
@ -163,6 +191,11 @@ class Environment
return $env;
}
public function expandString(string $input): string
{
return preg_replace_callback('/[\%\$]\{(.+?)\}/', function ($match) {
    // Expand from $_ENV first, then getenv(); unknown variables expand to ''
    return $_ENV[$match[1]] ?? (getenv($match[1]) ?: '');
}, $input);
}
}

View File

@ -0,0 +1,91 @@
<?php
namespace Spark\Environment;
class ScriptRunner
{
private array $scripts = [];
private ?string $directory = null;
public function setDirectory(?string $directory)
{
$this->directory = $directory;
}
public function defineScript(string $name, string|array $script)
{
$this->scripts[$name] = $script;
}
public function evaluateDefinedScript(string $name)
{
$script = $this->scripts[$name];
$this->evaluate($script);
}
public function evaluate(string|array $script)
{
if (is_array($script)) {
foreach ($script as $step) {
$this->evaluate($step);
}
return;
}
$script = $this->expandString($script);
// Determine what to do
if (str_starts_with($script, '@')) {
// starts with @, call on a defined script
$subname = substr($script, 1);
$subscript = $this->scripts[$subname];
$this->evaluate($subscript);
} else {
if (posix_isatty(STDOUT)) {
printf("\e[0;33m> \e[0;93m%s\e[0m\n", $script);
} else {
printf("> %s\n", $script);
}
if (str_contains($script, ' ')) {
[$script, $args] = explode(" ", $script, 2);
$args = str_getcsv($args, ' ', "'");
} else {
$args = [];
}
if (is_callable($script)) {
// call script
call_user_func($script, ...$args);
} elseif (file_exists((string)$script) && fnmatch("*.php", (string)$script)) {
include $script;
} else {
// call shell
$cmdl = trim(escapeshellcmd((string)$script) . " " . join(" ", array_map("escapeshellarg", $args)));
$proc = proc_open($cmdl, [ 0 => STDIN, 1 => STDOUT, 2 => STDERR ], $pipes, $this->directory);
while ($stat = proc_get_status($proc)) {
if ($stat['running'] === false) {
$ec = $stat['exitcode'];
if ($ec != 0) {
printf("\e[31mcommand exited with code %d.\e[0m\n", $stat['exitcode']);
throw new \RuntimeException("Command {$cmdl} exited with code {$ec}");
}
break;
}
usleep(100000);
}
}
}
}
public function expandString(string $input)
{
return preg_replace_callback('/(\$\{(.+?)\})/', function ($match) {
return ($_ENV[$match[2]]??getenv($match[2]))??null;
}, $input);
}
}

View File

@ -25,7 +25,7 @@ class SparkApplication extends Application
public function __construct()
{
parent::__construct("Spark", APP_VERSION);
parent::__construct("Spark\u{26a1}", APP_VERSION);
self::$instance = $this;
$this->resourceManager = new ResourceManager();
@ -39,6 +39,10 @@ class SparkApplication extends Application
$this->add(new Commands\ReplCommand());
$this->add(new Commands\InitCommand());
$this->get("list")->setHidden(true);
$this->get("completion")->setHidden(true);
$this->get("help")->setHidden(true);
if (getenv("SPARK_PLUGINS")) {
$this->add(new Commands\PluginsCommand());
}

View File

@ -1,5 +1,10 @@
<?php
if (!(file_exists(getcwd()."/plugins") && file_exists(getcwd()."/spark.phar"))) {
fwrite(STDERR, "Not running from installer directory! Already installed?\n");
exit(1);
}
function askConfirm(string $prompt, bool $default) {
$pstr = sprintf("%s [%s]? ", $prompt, $default?"Y/n":"y/N");
@ -26,7 +31,13 @@ function askString(string $prompt, ?string $default=null) {
}
printf("\n%s\n\e[1mSpark\e[0m Installer\n%s\n\n", str_repeat("\u{2500}",40), str_repeat("\u{2500}", 40));
echo " ___ _ \n";
echo "/ __|_ __ __ _ _ _| |__\n";
echo "\\__ \\ '_ \\/ _` | '_| / /\n";
echo "|___/ .__/\\__,_|_| |_\\_\\\n";
echo " |_| \n";
printf("\n%s\n \u{26a1} \e[1mSpark\e[0m Installer\n%s\n\n", str_repeat("\u{2500}",40), str_repeat("\u{2500}", 40));
$destination = askString("Installation directory", getenv("HOME")."/opt/spark");
$binaries = askString("Path for executables", getenv("HOME")."/bin");
@ -57,14 +68,17 @@ passthru("cp -R plugins/* ".escapeshellarg($destination."/plugins/"));
if ($doPlugins) {
$file = sprintf("export SPARK_PLUGINS=\"%s/plugins\"\n", $destination);
file_put_contents(getenv("HOME")."/.profile_spark", $file);
file_put_contents(getenv("HOME")."/.bashrc_spark", $file);
printf("Updated \e[3m.bashrc_spark\e[0m.\n");
$file = file_get_contents(getenv("HOME")."/.profile");
$file .= "\nsource ~/.profile_spark\n";
file_put_contents(getenv("HOME")."/.profile.new", $file);
rename(getenv("HOME")."/.profile", getenv("HOME")."/.profile.bak");
rename(getenv("HOME")."/.profile.new", getenv("HOME")."/.profile");
printf("Updated \e[3m.profile\e[0m.\n");
$file = file_get_contents(getenv("HOME")."/.bashrc");
if (!str_contains($file, ".bashrc_spark")) {
$file .= "\nsource ~/.bashrc_spark\n";
file_put_contents(getenv("HOME")."/.bashrc.new", $file);
rename(getenv("HOME")."/.bashrc", getenv("HOME")."/.bashrc.bak");
rename(getenv("HOME")."/.bashrc.new", getenv("HOME")."/.bashrc");
printf("Updated \e[3m.bashrc\e[0m.\n");
}
}
if ($doAliases) {
$file = file_get_contents(getenv("HOME")."/.bash_aliases") . "\n";

View File

@ -0,0 +1,30 @@
<?php
namespace Spark\Environment;
class ScriptRunnerTest extends \PHPUnit\Framework\TestCase
{
/**
* @dataProvider stringExpansionData
* @covers ScriptRunner::expandString
*/
public function testStringExpansion($source, $expect)
{
$runner = new ScriptRunner();
$expanded = $runner->expandString($source);
$this->assertEquals($expect, $expanded);
}
public function stringExpansionData()
{
return [
[ 'Hello World!', 'Hello World!' ],
[ '${testenv}', '' ],
[ '${PATH}', getenv("PATH") ],
[ 'Greetings ${USER}', 'Greetings '.getenv("USER") ],
];
}
}

View File

@ -1 +0,0 @@
makeself-2.4.5/makeself.sh

822
tools/makeself Executable file
View File

@ -0,0 +1,822 @@
#!/bin/sh
#
# Makeself version 2.4.x
# by Stephane Peter <megastep@megastep.org>
#
# Utility to create self-extracting tar.gz archives.
# The resulting archive is a file holding the tar.gz archive with
# a small Shell script stub that uncompresses the archive to a temporary
# directory and then executes a given script from withing that directory.
#
# Makeself home page: https://makeself.io/
#
# Version 2.0 is a rewrite of version 1.0 to make the code easier to read and maintain.
#
# Version history :
# - 1.0 : Initial public release
# - 1.1 : The archive can be passed parameters that will be passed on to
# the embedded script, thanks to John C. Quillan
# - 1.2 : Package distribution, bzip2 compression, more command line options,
# support for non-temporary archives. Ideas thanks to Francois Petitjean
# - 1.3 : More patches from Bjarni R. Einarsson and Francois Petitjean:
# Support for no compression (--nocomp), script is no longer mandatory,
# automatic launch in an xterm, optional verbose output, and -target
# archive option to indicate where to extract the files.
# - 1.4 : Improved UNIX compatibility (Francois Petitjean)
# Automatic integrity checking, support of LSM files (Francois Petitjean)
# - 1.5 : Many bugfixes. Optionally disable xterm spawning.
# - 1.5.1 : More bugfixes, added archive options -list and -check.
# - 1.5.2 : Cosmetic changes to inform the user of what's going on with big
# archives (Quake III demo)
# - 1.5.3 : Check for validity of the DISPLAY variable before launching an xterm.
# More verbosity in xterms and check for embedded command's return value.
# Bugfix for Debian 2.0 systems that have a different "print" command.
# - 1.5.4 : Many bugfixes. Print out a message if the extraction failed.
# - 1.5.5 : More bugfixes. Added support for SETUP_NOCHECK environment variable to
# bypass checksum verification of archives.
# - 1.6.0 : Compute MD5 checksums with the md5sum command (patch from Ryan Gordon)
# - 2.0 : Brand new rewrite, cleaner architecture, separated header and UNIX ports.
# - 2.0.1 : Added --copy
# - 2.1.0 : Allow multiple tarballs to be stored in one archive, and incremental updates.
# Added --nochown for archives
# Stopped doing redundant checksums when not necesary
# - 2.1.1 : Work around insane behavior from certain Linux distros with no 'uncompress' command
# Cleaned up the code to handle error codes from compress. Simplified the extraction code.
# - 2.1.2 : Some bug fixes. Use head -n to avoid problems.
# - 2.1.3 : Bug fixes with command line when spawning terminals.
# Added --tar for archives, allowing to give arbitrary arguments to tar on the contents of the archive.
# Added --noexec to prevent execution of embedded scripts.
# Added --nomd5 and --nocrc to avoid creating checksums in archives.
# Added command used to create the archive in --info output.
# Run the embedded script through eval.
# - 2.1.4 : Fixed --info output.
# Generate random directory name when extracting files to . to avoid problems. (Jason Trent)
# Better handling of errors with wrong permissions for the directory containing the files. (Jason Trent)
# Avoid some race conditions (Ludwig Nussel)
# Unset the $CDPATH variable to avoid problems if it is set. (Debian)
# Better handling of dot files in the archive directory.
# - 2.1.5 : Made the md5sum detection consistent with the header code.
# Check for the presence of the archive directory
# Added --encrypt for symmetric encryption through gpg (Eric Windisch)
# Added support for the digest command on Solaris 10 for MD5 checksums
# Check for available disk space before extracting to the target directory (Andreas Schweitzer)
# Allow extraction to run asynchronously (patch by Peter Hatch)
# Use file descriptors internally to avoid error messages (patch by Kay Tiong Khoo)
# - 2.1.6 : Replaced one dot per file progress with a realtime progress percentage and a spining cursor (Guy Baconniere)
# Added --noprogress to prevent showing the progress during the decompression (Guy Baconniere)
# Added --target dir to allow extracting directly to a target directory (Guy Baconniere)
# - 2.2.0 : Many bugfixes, updates and contributions from users. Check out the project page on Github for the details.
# - 2.3.0 : Option to specify packaging date to enable byte-for-byte reproducibility. (Marc Pawlowsky)
# - 2.4.0 : Optional support for SHA256 checksums in archives.
# - 2.4.2 : Add support for threads for several compressors. (M. Limber)
# Added zstd support.
# - 2.4.3 : Make explicit POSIX tar archives for increased compatibility.
# - 2.4.5 : Added --tar-format to override ustar tar archive format
#
# (C) 1998-2021 by Stephane Peter <megastep@megastep.org>
#
# This software is released under the terms of the GNU GPL version 2 and above
# Please read the license at http://www.gnu.org/copyleft/gpl.html
# Self-extracting archives created with this script are explictly NOT released under the term of the GPL
#
MS_VERSION=2.4.5
MS_COMMAND="$0"
unset CDPATH
for f in ${1+"$@"}; do
MS_COMMAND="$MS_COMMAND \\\\
\\\"$f\\\""
done
# For Solaris systems
if test -d /usr/xpg4/bin; then
PATH=/usr/xpg4/bin:$PATH
export PATH
fi
# Procedures
MS_Usage()
{
echo "Usage: $0 [args] archive_dir file_name label startup_script [script_args]"
echo "args can be one or more of the following :"
echo " --version | -v : Print out Makeself version number and exit"
echo " --help | -h : Print out this help message"
echo " --tar-quietly : Suppress verbose output from the tar command"
echo " --quiet | -q : Do not print any messages other than errors."
echo " --gzip : Compress using gzip (default if detected)"
echo " --pigz : Compress with pigz"
echo " --zstd : Compress with zstd"
echo " --bzip2 : Compress using bzip2 instead of gzip"
echo " --pbzip2 : Compress using pbzip2 instead of gzip"
echo " --xz : Compress using xz instead of gzip"
echo " --lzo : Compress using lzop instead of gzip"
echo " --lz4 : Compress using lz4 instead of gzip"
echo " --compress : Compress using the UNIX 'compress' command"
echo " --complevel lvl : Compression level for gzip pigz zstd xz lzo lz4 bzip2 and pbzip2 (default 9)"
echo " --threads thds : Number of threads to be used by compressors that support parallelization."
echo " Omit to use compressor's default. Most useful (and required) for opting"
echo " into xz's threading, usually with '--threads=0' for all available cores."
echo " pbzip2 and pigz are parallel by default, and setting this value allows"
echo " limiting the number of threads they use."
echo " --base64 : Instead of compressing, encode the data using base64"
echo " --gpg-encrypt : Instead of compressing, encrypt the data using GPG"
echo " --gpg-asymmetric-encrypt-sign"
echo " : Instead of compressing, asymmetrically encrypt and sign the data using GPG"
echo " --gpg-extra opt : Append more options to the gpg command line"
echo " --ssl-encrypt : Instead of compressing, encrypt the data using OpenSSL"
echo " --ssl-passwd pass : Use the given password to encrypt the data using OpenSSL"
echo " --ssl-pass-src src : Use the given src as the source of password to encrypt the data"
echo " using OpenSSL. See \"PASS PHRASE ARGUMENTS\" in man openssl."
echo " If this option is not supplied, the user will be asked to enter"
echo " encryption password on the current terminal."
echo " --ssl-no-md : Do not use \"-md\" option not supported by older OpenSSL."
echo " --nochown : Do not give the target folder to the current user (default)"
echo " --chown : Give the target folder to the current user recursively"
echo " --nocomp : Do not compress the data"
echo " --notemp : The archive will create archive_dir in the"
echo " current directory and uncompress in ./archive_dir"
echo " --needroot : Check that the root user is extracting the archive before proceeding"
echo " --copy : Upon extraction, the archive will first copy itself to"
echo " a temporary directory"
echo " --append : Append more files to an existing Makeself archive"
echo " The label and startup scripts will then be ignored"
echo " --target dir : Extract directly to a target directory"
echo " directory path can be either absolute or relative"
echo " --nooverwrite : Do not extract the archive if the specified target directory exists"
echo " --current : Files will be extracted to the current directory"
echo " Both --current and --target imply --notemp"
echo " --tar-format opt : Specify a tar archive format (default is ustar)"
echo " --tar-extra opt : Append more options to the tar command line"
echo " --untar-extra opt : Append more options to the during the extraction of the tar archive"
echo " --nomd5 : Don't calculate an MD5 for archive"
echo " --nocrc : Don't calculate a CRC for archive"
echo " --sha256 : Compute a SHA256 checksum for the archive"
echo " --header file : Specify location of the header script"
echo " --cleanup file : Specify a cleanup script that executes on interrupt and when finished successfully."
echo " --follow : Follow the symlinks in the archive"
echo " --noprogress : Do not show the progress during the decompression"
echo " --nox11 : Disable automatic spawn of a xterm"
echo " --nowait : Do not wait for user input after executing embedded"
echo " program from an xterm"
echo " --sign passphrase : Signature private key to sign the package with"
echo " --lsm file : LSM file describing the package"
echo " --license file : Append a license file"
echo " --help-header file : Add a header to the archive's --help output"
echo " --packaging-date date"
echo " : Use provided string as the packaging date"
echo " instead of the current date."
echo
echo " --keep-umask : Keep the umask set to shell default, rather than overriding when executing self-extracting archive."
echo " --export-conf : Export configuration variables to startup_script"
echo
echo "Do not forget to give a fully qualified startup script name"
echo "(i.e. with a ./ prefix if inside the archive)."
exit 1
}
# Default settings
if type gzip >/dev/null 2>&1; then
COMPRESS=gzip
elif type compress >/dev/null 2>&1; then
COMPRESS=compress
else
echo "ERROR: missing commands: gzip, compress" >&2
MS_Usage
fi
ENCRYPT=n
PASSWD=""
PASSWD_SRC=""
OPENSSL_NO_MD=n
COMPRESS_LEVEL=9
DEFAULT_THREADS=123456 # Sentinel value
THREADS=$DEFAULT_THREADS
KEEP=n
CURRENT=n
NOX11=n
NOWAIT=n
APPEND=n
TAR_QUIETLY=n
KEEP_UMASK=n
QUIET=n
NOPROGRESS=n
COPY=none
NEED_ROOT=n
TAR_ARGS=rvf
TAR_FORMAT=ustar
TAR_EXTRA=""
GPG_EXTRA=""
DU_ARGS=-ks
HEADER=`dirname "$0"`/makeself-header.sh
SIGNATURE=""
TARGETDIR=""
NOOVERWRITE=n
DATE=`LC_ALL=C date`
EXPORT_CONF=n
SHA256=n
OWNERSHIP=n
SIGN=n
GPG_PASSPHRASE=""
# LSM file stuff
LSM_CMD="echo No LSM. >> \"\$archname\""
while true
do
case "$1" in
--version | -v)
echo Makeself version $MS_VERSION
exit 0
;;
--pbzip2)
COMPRESS=pbzip2
shift
;;
--bzip2)
COMPRESS=bzip2
shift
;;
--gzip)
COMPRESS=gzip
shift
;;
--pigz)
COMPRESS=pigz
shift
;;
--zstd)
COMPRESS=zstd
shift
;;
--xz)
COMPRESS=xz
shift
;;
--lzo)
COMPRESS=lzo
shift
;;
--lz4)
COMPRESS=lz4
shift
;;
--compress)
COMPRESS=compress
shift
;;
--base64)
COMPRESS=base64
shift
;;
--gpg-encrypt)
COMPRESS=gpg
shift
;;
--gpg-asymmetric-encrypt-sign)
COMPRESS=gpg-asymmetric
shift
;;
--gpg-extra)
GPG_EXTRA="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--ssl-encrypt)
ENCRYPT=openssl
shift
;;
--ssl-passwd)
PASSWD=$2
shift 2 || { MS_Usage; exit 1; }
;;
--ssl-pass-src)
PASSWD_SRC=$2
shift 2 || { MS_Usage; exit 1; }
;;
--ssl-no-md)
OPENSSL_NO_MD=y
shift
;;
--nocomp)
COMPRESS=none
shift
;;
--complevel)
COMPRESS_LEVEL="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--threads)
THREADS="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--nochown)
OWNERSHIP=n
shift
;;
--chown)
OWNERSHIP=y
shift
;;
--notemp)
KEEP=y
shift
;;
--copy)
COPY=copy
shift
;;
--current)
CURRENT=y
KEEP=y
shift
;;
--tar-format)
TAR_FORMAT="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--tar-extra)
TAR_EXTRA="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--untar-extra)
UNTAR_EXTRA="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--target)
TARGETDIR="$2"
KEEP=y
shift 2 || { MS_Usage; exit 1; }
;;
--sign)
SIGN=y
GPG_PASSPHRASE="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--nooverwrite)
NOOVERWRITE=y
shift
;;
--needroot)
NEED_ROOT=y
shift
;;
--header)
HEADER="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--cleanup)
CLEANUP_SCRIPT="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--license)
# We need to escape all characters having a special meaning in double quotes
LICENSE=$(sed 's/\\/\\\\/g; s/"/\\\"/g; s/`/\\\`/g; s/\$/\\\$/g' "$2")
shift 2 || { MS_Usage; exit 1; }
;;
--follow)
TAR_ARGS=rvhf
DU_ARGS=-ksL
shift
;;
--noprogress)
NOPROGRESS=y
shift
;;
--nox11)
NOX11=y
shift
;;
--nowait)
NOWAIT=y
shift
;;
--nomd5)
NOMD5=y
shift
;;
--sha256)
SHA256=y
shift
;;
--nocrc)
NOCRC=y
shift
;;
--append)
APPEND=y
shift
;;
--lsm)
LSM_CMD="cat \"$2\" >> \"\$archname\""
shift 2 || { MS_Usage; exit 1; }
;;
--packaging-date)
DATE="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--help-header)
HELPHEADER=`sed -e "s/'/'\\\\\''/g" $2`
shift 2 || { MS_Usage; exit 1; }
[ -n "$HELPHEADER" ] && HELPHEADER="$HELPHEADER
"
;;
--tar-quietly)
TAR_QUIETLY=y
shift
;;
--keep-umask)
KEEP_UMASK=y
shift
;;
--export-conf)
EXPORT_CONF=y
shift
;;
-q | --quiet)
QUIET=y
shift
;;
-h | --help)
MS_Usage
;;
-*)
echo Unrecognized flag : "$1"
MS_Usage
;;
*)
break
;;
esac
done
if test $# -lt 1; then
MS_Usage
else
if test -d "$1"; then
archdir="$1"
else
echo "Directory $1 does not exist." >&2
exit 1
fi
fi
archname="$2"
if test "$QUIET" = "y" || test "$TAR_QUIETLY" = "y"; then
if test "$TAR_ARGS" = "rvf"; then
TAR_ARGS="rf"
elif test "$TAR_ARGS" = "rvhf"; then
TAR_ARGS="rhf"
fi
fi
if test "$APPEND" = y; then
if test $# -lt 2; then
MS_Usage
fi
# Gather the info from the original archive
OLDENV=`sh "$archname" --dumpconf`
if test $? -ne 0; then
echo "Unable to update archive: $archname" >&2
exit 1
else
eval "$OLDENV"
OLDSKIP=`expr $SKIP + 1`
fi
else
if test "$KEEP" = n -a $# = 3; then
echo "ERROR: Making a temporary archive with no embedded command does not make sense!" >&2
echo >&2
MS_Usage
fi
# We don't want to create an absolute directory unless a target directory is defined
if test "$CURRENT" = y; then
archdirname="."
elif test x"$TARGETDIR" != x; then
archdirname="$TARGETDIR"
else
archdirname=`basename "$1"`
fi
if test $# -lt 3; then
MS_Usage
fi
LABEL="$3"
SCRIPT="$4"
test "x$SCRIPT" = x || shift 1
shift 3
SCRIPTARGS="$*"
fi
if test "$KEEP" = n -a "$CURRENT" = y; then
echo "ERROR: It is A VERY DANGEROUS IDEA to try to combine --notemp and --current." >&2
exit 1
fi
case $COMPRESS in
gzip)
GZIP_CMD="gzip -c$COMPRESS_LEVEL"
GUNZIP_CMD="gzip -cd"
;;
pigz)
GZIP_CMD="pigz -$COMPRESS_LEVEL"
if test $THREADS -ne $DEFAULT_THREADS; then # Leave as the default if threads not indicated
GZIP_CMD="$GZIP_CMD --processes $THREADS"
fi
GUNZIP_CMD="gzip -cd"
;;
zstd)
GZIP_CMD="zstd -$COMPRESS_LEVEL"
if test $THREADS -ne $DEFAULT_THREADS; then # Leave as the default if threads not indicated
GZIP_CMD="$GZIP_CMD --threads=$THREADS"
fi
GUNZIP_CMD="zstd -cd"
;;
pbzip2)
GZIP_CMD="pbzip2 -c$COMPRESS_LEVEL"
if test $THREADS -ne $DEFAULT_THREADS; then # Leave as the default if threads not indicated
GZIP_CMD="$GZIP_CMD -p$THREADS"
fi
GUNZIP_CMD="bzip2 -d"
;;
bzip2)
GZIP_CMD="bzip2 -$COMPRESS_LEVEL"
GUNZIP_CMD="bzip2 -d"
;;
xz)
GZIP_CMD="xz -c$COMPRESS_LEVEL"
# Must opt-in by specifying a value since not all versions of xz support threads
if test $THREADS -ne $DEFAULT_THREADS; then
GZIP_CMD="$GZIP_CMD --threads=$THREADS"
fi
GUNZIP_CMD="xz -d"
;;
lzo)
GZIP_CMD="lzop -c$COMPRESS_LEVEL"
GUNZIP_CMD="lzop -d"
;;
lz4)
GZIP_CMD="lz4 -c$COMPRESS_LEVEL"
GUNZIP_CMD="lz4 -d"
;;
base64)
GZIP_CMD="base64"
GUNZIP_CMD="base64 --decode -i -"
;;
gpg)
GZIP_CMD="gpg $GPG_EXTRA -ac -z$COMPRESS_LEVEL"
GUNZIP_CMD="gpg -d"
ENCRYPT="gpg"
;;
gpg-asymmetric)
GZIP_CMD="gpg $GPG_EXTRA -z$COMPRESS_LEVEL -es"
GUNZIP_CMD="gpg --yes -d"
ENCRYPT="gpg"
;;
compress)
GZIP_CMD="compress -fc"
GUNZIP_CMD="(type compress >/dev/null 2>&1 && compress -fcd || gzip -cd)"
;;
none)
GZIP_CMD="cat"
GUNZIP_CMD="cat"
;;
esac
if test x"$ENCRYPT" = x"openssl"; then
if test x"$APPEND" = x"y"; then
echo "Appending to existing archive is not compatible with OpenSSL encryption." >&2
fi
ENCRYPT_CMD="openssl enc -aes-256-cbc -salt"
DECRYPT_CMD="openssl enc -aes-256-cbc -d"
if test x"$OPENSSL_NO_MD" != x"y"; then
ENCRYPT_CMD="$ENCRYPT_CMD -md sha256"
DECRYPT_CMD="$DECRYPT_CMD -md sha256"
fi
if test -n "$PASSWD_SRC"; then
ENCRYPT_CMD="$ENCRYPT_CMD -pass $PASSWD_SRC"
elif test -n "$PASSWD"; then
ENCRYPT_CMD="$ENCRYPT_CMD -pass pass:$PASSWD"
fi
fi
tmpfile="${TMPDIR:-/tmp}/mkself$$"
if test -f "$HEADER"; then
oldarchname="$archname"
archname="$tmpfile"
# Generate a fake header to count its lines
SKIP=0
. "$HEADER"
SKIP=`cat "$tmpfile" |wc -l`
# Get rid of any spaces
SKIP=`expr $SKIP`
rm -f "$tmpfile"
if test "$QUIET" = "n"; then
echo "Header is $SKIP lines long" >&2
fi
archname="$oldarchname"
else
echo "Unable to open header file: $HEADER" >&2
exit 1
fi
if test "$QUIET" = "n"; then
echo
fi
if test "$APPEND" = n; then
if test -f "$archname"; then
echo "WARNING: Overwriting existing file: $archname" >&2
fi
fi
USIZE=`du $DU_ARGS "$archdir" | awk '{print $1}'`
if test "." = "$archdirname"; then
if test "$KEEP" = n; then
archdirname="makeself-$$-`date +%Y%m%d%H%M%S`"
fi
fi
test -d "$archdir" || { echo "Error: $archdir does not exist."; rm -f "$tmpfile"; exit 1; }
if test "$QUIET" = "n"; then
echo "About to compress $USIZE KB of data..."
echo "Adding files to archive named \"$archname\"..."
fi
# See if we have GNU tar
TAR=`exec <&- 2>&-; which gtar || command -v gtar || type gtar`
test -x "$TAR" || TAR=tar
tmparch="${TMPDIR:-/tmp}/mkself$$.tar"
(
if test "$APPEND" = "y"; then
tail -n "+$OLDSKIP" "$archname" | eval "$GUNZIP_CMD" > "$tmparch"
fi
cd "$archdir"
# "Determining if a directory is empty"
# https://www.etalabs.net/sh_tricks.html
find . \
\( \
! -type d \
-o \
\( -links 2 -exec sh -c '
is_empty () (
cd "$1"
set -- .[!.]* ; test -f "$1" && return 1
set -- ..?* ; test -f "$1" && return 1
set -- * ; test -f "$1" && return 1
return 0
)
is_empty "$0"' {} \; \
\) \
\) -print \
| LC_ALL=C sort \
| sed 's/./\\&/g' \
| xargs $TAR $TAR_EXTRA --format $TAR_FORMAT -$TAR_ARGS "$tmparch"
) || {
echo "ERROR: failed to create temporary archive: $tmparch"
rm -f "$tmparch" "$tmpfile"
exit 1
}
USIZE=`du $DU_ARGS "$tmparch" | awk '{print $1}'`
eval "$GZIP_CMD" <"$tmparch" >"$tmpfile" || {
echo "ERROR: failed to create temporary file: $tmpfile"
rm -f "$tmparch" "$tmpfile"
exit 1
}
rm -f "$tmparch"
if test x"$ENCRYPT" = x"openssl"; then
echo "About to encrypt archive \"$archname\"..."
{ eval "$ENCRYPT_CMD -in $tmpfile -out ${tmpfile}.enc" && mv -f ${tmpfile}.enc $tmpfile; } || \
{ echo Aborting: could not encrypt temporary file: "$tmpfile".; rm -f "$tmpfile"; exit 1; }
fi
fsize=`cat "$tmpfile" | wc -c | tr -d " "`
# Compute the checksums
shasum=0000000000000000000000000000000000000000000000000000000000000000
md5sum=00000000000000000000000000000000
crcsum=0000000000
if test "$NOCRC" = y; then
if test "$QUIET" = "n"; then
echo "skipping crc at user request"
fi
else
crcsum=`CMD_ENV=xpg4 cksum < "$tmpfile" | sed -e 's/ /Z/' -e 's/ /Z/' | cut -dZ -f1`
if test "$QUIET" = "n"; then
echo "CRC: $crcsum"
fi
fi
if test "$SHA256" = y; then
SHA_PATH=`exec <&- 2>&-; which shasum || command -v shasum || type shasum`
if test -x "$SHA_PATH"; then
shasum=`eval "$SHA_PATH -a 256" < "$tmpfile" | cut -b-64`
else
SHA_PATH=`exec <&- 2>&-; which sha256sum || command -v sha256sum || type sha256sum`
shasum=`eval "$SHA_PATH" < "$tmpfile" | cut -b-64`
fi
if test "$QUIET" = "n"; then
if test -x "$SHA_PATH"; then
echo "SHA256: $shasum"
else
echo "SHA256: none, SHA command not found"
fi
fi
fi
if test "$NOMD5" = y; then
if test "$QUIET" = "n"; then
echo "Skipping md5sum at user request"
fi
else
# Try to locate a MD5 binary
OLD_PATH=$PATH
PATH=${GUESS_MD5_PATH:-"$OLD_PATH:/bin:/usr/bin:/sbin:/usr/local/ssl/bin:/usr/local/bin:/opt/openssl/bin"}
MD5_ARG=""
MD5_PATH=`exec <&- 2>&-; which md5sum || command -v md5sum || type md5sum`
test -x "$MD5_PATH" || MD5_PATH=`exec <&- 2>&-; which md5 || command -v md5 || type md5`
test -x "$MD5_PATH" || MD5_PATH=`exec <&- 2>&-; which digest || command -v digest || type digest`
PATH=$OLD_PATH
if test -x "$MD5_PATH"; then
if test `basename ${MD5_PATH}`x = digestx; then
MD5_ARG="-a md5"
fi
md5sum=`eval "$MD5_PATH $MD5_ARG" < "$tmpfile" | cut -b-32`
if test "$QUIET" = "n"; then
echo "MD5: $md5sum"
fi
else
if test "$QUIET" = "n"; then
echo "MD5: none, MD5 command not found"
fi
fi
fi
if test "$SIGN" = y; then
GPG_PATH=`exec <&- 2>&-; which gpg || command -v gpg || type gpg`
if test -x "$GPG_PATH"; then
SIGNATURE=`$GPG_PATH --pinentry-mode=loopback --batch --yes --passphrase "$GPG_PASSPHRASE" --output - --detach-sig $tmpfile | base64 | tr -d \\\\n`
if test "$QUIET" = "n"; then
echo "Signature: $SIGNATURE"
fi
else
echo "Missing gpg command" >&2
fi
fi
totalsize=0
for size in $fsize;
do
totalsize=`expr $totalsize + $size`
done
if test "$APPEND" = y; then
mv "$archname" "$archname".bak || exit
# Prepare entry for new archive
filesizes="$fsize"
CRCsum="$crcsum"
MD5sum="$md5sum"
SHAsum="$shasum"
Signature="$SIGNATURE"
# Generate the header
. "$HEADER"
# Append the new data
cat "$tmpfile" >> "$archname"
chmod +x "$archname"
rm -f "$archname".bak
if test "$QUIET" = "n"; then
echo "Self-extractable archive \"$archname\" successfully updated."
fi
else
filesizes="$fsize"
CRCsum="$crcsum"
MD5sum="$md5sum"
SHAsum="$shasum"
Signature="$SIGNATURE"
# Generate the header
. "$HEADER"
# Append the compressed tar data after the stub
if test "$QUIET" = "n"; then
echo
fi
cat "$tmpfile" >> "$archname"
chmod +x "$archname"
if test "$QUIET" = "n"; then
echo Self-extractable archive \"$archname\" successfully created.
fi
fi
rm -f "$tmpfile"

View File

@ -1,822 +0,0 @@
#!/bin/sh
#
# Makeself version 2.4.x
# by Stephane Peter <megastep@megastep.org>
#
# Utility to create self-extracting tar.gz archives.
# The resulting archive is a file holding the tar.gz archive with
# a small Shell script stub that uncompresses the archive to a temporary
# directory and then executes a given script from within that directory.
#
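# Illustrative invocation (a minimal sketch; the directory, archive name,
# label and startup script below are made-up examples, not part of this repo):
#
#   ./makeself.sh --gzip ./dist myapp-1.0.run "My App 1.0" ./install.sh
#
# This wraps ./dist into the self-extracting archive myapp-1.0.run, which
# unpacks itself to a temporary directory and then runs ./install.sh.
#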
# Makeself home page: https://makeself.io/
#
# Version 2.0 is a rewrite of version 1.0 to make the code easier to read and maintain.
#
# Version history :
# - 1.0 : Initial public release
# - 1.1 : The archive can be passed parameters that will be passed on to
# the embedded script, thanks to John C. Quillan
# - 1.2 : Package distribution, bzip2 compression, more command line options,
# support for non-temporary archives. Ideas thanks to Francois Petitjean
# - 1.3 : More patches from Bjarni R. Einarsson and Francois Petitjean:
# Support for no compression (--nocomp), script is no longer mandatory,
# automatic launch in an xterm, optional verbose output, and -target
# archive option to indicate where to extract the files.
# - 1.4 : Improved UNIX compatibility (Francois Petitjean)
# Automatic integrity checking, support of LSM files (Francois Petitjean)
# - 1.5 : Many bugfixes. Optionally disable xterm spawning.
# - 1.5.1 : More bugfixes, added archive options -list and -check.
# - 1.5.2 : Cosmetic changes to inform the user of what's going on with big
# archives (Quake III demo)
# - 1.5.3 : Check for validity of the DISPLAY variable before launching an xterm.
# More verbosity in xterms and check for embedded command's return value.
# Bugfix for Debian 2.0 systems that have a different "print" command.
# - 1.5.4 : Many bugfixes. Print out a message if the extraction failed.
# - 1.5.5 : More bugfixes. Added support for SETUP_NOCHECK environment variable to
# bypass checksum verification of archives.
# - 1.6.0 : Compute MD5 checksums with the md5sum command (patch from Ryan Gordon)
# - 2.0 : Brand new rewrite, cleaner architecture, separated header and UNIX ports.
# - 2.0.1 : Added --copy
# - 2.1.0 : Allow multiple tarballs to be stored in one archive, and incremental updates.
# Added --nochown for archives
# Stopped doing redundant checksums when not necessary
# - 2.1.1 : Work around insane behavior from certain Linux distros with no 'uncompress' command
# Cleaned up the code to handle error codes from compress. Simplified the extraction code.
# - 2.1.2 : Some bug fixes. Use head -n to avoid problems.
# - 2.1.3 : Bug fixes with command line when spawning terminals.
# Added --tar for archives, allowing to give arbitrary arguments to tar on the contents of the archive.
# Added --noexec to prevent execution of embedded scripts.
# Added --nomd5 and --nocrc to avoid creating checksums in archives.
# Added command used to create the archive in --info output.
# Run the embedded script through eval.
# - 2.1.4 : Fixed --info output.
# Generate random directory name when extracting files to . to avoid problems. (Jason Trent)
# Better handling of errors with wrong permissions for the directory containing the files. (Jason Trent)
# Avoid some race conditions (Ludwig Nussel)
# Unset the $CDPATH variable to avoid problems if it is set. (Debian)
# Better handling of dot files in the archive directory.
# - 2.1.5 : Made the md5sum detection consistent with the header code.
# Check for the presence of the archive directory
# Added --encrypt for symmetric encryption through gpg (Eric Windisch)
# Added support for the digest command on Solaris 10 for MD5 checksums
# Check for available disk space before extracting to the target directory (Andreas Schweitzer)
# Allow extraction to run asynchronously (patch by Peter Hatch)
# Use file descriptors internally to avoid error messages (patch by Kay Tiong Khoo)
# - 2.1.6 : Replaced one dot per file progress with a realtime progress percentage and a spinning cursor (Guy Baconniere)
# Added --noprogress to prevent showing the progress during the decompression (Guy Baconniere)
# Added --target dir to allow extracting directly to a target directory (Guy Baconniere)
# - 2.2.0 : Many bugfixes, updates and contributions from users. Check out the project page on Github for the details.
# - 2.3.0 : Option to specify packaging date to enable byte-for-byte reproducibility. (Marc Pawlowsky)
# - 2.4.0 : Optional support for SHA256 checksums in archives.
# - 2.4.2 : Add support for threads for several compressors. (M. Limber)
# Added zstd support.
# - 2.4.3 : Make explicit POSIX tar archives for increased compatibility.
# - 2.4.5 : Added --tar-format to override ustar tar archive format
#
# (C) 1998-2021 by Stephane Peter <megastep@megastep.org>
#
# This software is released under the terms of the GNU GPL version 2 and above
# Please read the license at http://www.gnu.org/copyleft/gpl.html
# Self-extracting archives created with this script are explicitly NOT released under the terms of the GPL
#
MS_VERSION=2.4.5
MS_COMMAND="$0"
unset CDPATH
for f in ${1+"$@"}; do
MS_COMMAND="$MS_COMMAND \\\\
\\\"$f\\\""
done
# For Solaris systems
if test -d /usr/xpg4/bin; then
PATH=/usr/xpg4/bin:$PATH
export PATH
fi
# Procedures
MS_Usage()
{
echo "Usage: $0 [args] archive_dir file_name label startup_script [script_args]"
echo "args can be one or more of the following :"
echo " --version | -v : Print out Makeself version number and exit"
echo " --help | -h : Print out this help message"
echo " --tar-quietly : Suppress verbose output from the tar command"
echo " --quiet | -q : Do not print any messages other than errors."
echo " --gzip : Compress using gzip (default if detected)"
echo " --pigz : Compress with pigz"
echo " --zstd : Compress with zstd"
echo " --bzip2 : Compress using bzip2 instead of gzip"
echo " --pbzip2 : Compress using pbzip2 instead of gzip"
echo " --xz : Compress using xz instead of gzip"
echo " --lzo : Compress using lzop instead of gzip"
echo " --lz4 : Compress using lz4 instead of gzip"
echo " --compress : Compress using the UNIX 'compress' command"
echo " --complevel lvl : Compression level for gzip pigz zstd xz lzo lz4 bzip2 and pbzip2 (default 9)"
echo " --threads thds : Number of threads to be used by compressors that support parallelization."
echo " Omit to use compressor's default. Most useful (and required) for opting"
echo " into xz's threading, usually with '--threads=0' for all available cores."
echo " pbzip2 and pigz are parallel by default, and setting this value allows"
echo " limiting the number of threads they use."
echo " --base64 : Instead of compressing, encode the data using base64"
echo " --gpg-encrypt : Instead of compressing, encrypt the data using GPG"
echo " --gpg-asymmetric-encrypt-sign"
echo " : Instead of compressing, asymmetrically encrypt and sign the data using GPG"
echo " --gpg-extra opt : Append more options to the gpg command line"
echo " --ssl-encrypt : Instead of compressing, encrypt the data using OpenSSL"
echo " --ssl-passwd pass : Use the given password to encrypt the data using OpenSSL"
echo " --ssl-pass-src src : Use the given src as the source of password to encrypt the data"
echo " using OpenSSL. See \"PASS PHRASE ARGUMENTS\" in man openssl."
echo " If this option is not supplied, the user will be asked to enter"
echo " encryption password on the current terminal."
echo " --ssl-no-md : Do not use the \"-md\" option, which is not supported by older OpenSSL."
echo " --nochown : Do not give the target folder to the current user (default)"
echo " --chown : Give the target folder to the current user recursively"
echo " --nocomp : Do not compress the data"
echo " --notemp : The archive will create archive_dir in the"
echo " current directory and uncompress in ./archive_dir"
echo " --needroot : Check that the root user is extracting the archive before proceeding"
echo " --copy : Upon extraction, the archive will first copy itself to"
echo " a temporary directory"
echo " --append : Append more files to an existing Makeself archive"
echo " The label and startup scripts will then be ignored"
echo " --target dir : Extract directly to a target directory"
echo " directory path can be either absolute or relative"
echo " --nooverwrite : Do not extract the archive if the specified target directory exists"
echo " --current : Files will be extracted to the current directory"
echo " Both --current and --target imply --notemp"
echo " --tar-format opt : Specify a tar archive format (default is ustar)"
echo " --tar-extra opt : Append more options to the tar command line"
echo " --untar-extra opt : Append more options to the tar command during the extraction of the tar archive"
echo " --nomd5 : Don't calculate an MD5 for archive"
echo " --nocrc : Don't calculate a CRC for archive"
echo " --sha256 : Compute a SHA256 checksum for the archive"
echo " --header file : Specify location of the header script"
echo " --cleanup file : Specify a cleanup script that executes on interrupt and when finished successfully."
echo " --follow : Follow the symlinks in the archive"
echo " --noprogress : Do not show the progress during the decompression"
echo " --nox11 : Disable automatic spawn of an xterm"
echo " --nowait : Do not wait for user input after executing embedded"
echo " program from an xterm"
echo " --sign passphrase : Signature private key to sign the package with"
echo " --lsm file : LSM file describing the package"
echo " --license file : Append a license file"
echo " --help-header file : Add a header to the archive's --help output"
echo " --packaging-date date"
echo " : Use provided string as the packaging date"
echo " instead of the current date."
echo
echo " --keep-umask : Keep the umask set to shell default, rather than overriding when executing self-extracting archive."
echo " --export-conf : Export configuration variables to startup_script"
echo
echo "Do not forget to give a fully qualified startup script name"
echo "(i.e. with a ./ prefix if inside the archive)."
exit 1
}
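# For example (hypothetical names; options as documented above), one could build
# an archive that is OpenSSL-encrypted with a passphrase taken from the
# environment and that keeps its extraction directory:
#
#   ./makeself.sh --ssl-encrypt --ssl-pass-src env:ARCHIVE_PASS --notemp \
#       ./dist myapp-1.0.run "My App 1.0" ./install.sh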
# Default settings
if type gzip >/dev/null 2>&1; then
COMPRESS=gzip
elif type compress >/dev/null 2>&1; then
COMPRESS=compress
else
echo "ERROR: missing commands: gzip, compress" >&2
MS_Usage
fi
ENCRYPT=n
PASSWD=""
PASSWD_SRC=""
OPENSSL_NO_MD=n
COMPRESS_LEVEL=9
DEFAULT_THREADS=123456 # Sentinel value
THREADS=$DEFAULT_THREADS
KEEP=n
CURRENT=n
NOX11=n
NOWAIT=n
APPEND=n
TAR_QUIETLY=n
KEEP_UMASK=n
QUIET=n
NOPROGRESS=n
COPY=none
NEED_ROOT=n
TAR_ARGS=rvf
TAR_FORMAT=ustar
TAR_EXTRA=""
GPG_EXTRA=""
DU_ARGS=-ks
HEADER=`dirname "$0"`/makeself-header.sh
SIGNATURE=""
TARGETDIR=""
NOOVERWRITE=n
DATE=`LC_ALL=C date`
EXPORT_CONF=n
SHA256=n
OWNERSHIP=n
SIGN=n
GPG_PASSPHRASE=""
# LSM file stuff
LSM_CMD="echo No LSM. >> \"\$archname\""
while true
do
case "$1" in
--version | -v)
echo Makeself version $MS_VERSION
exit 0
;;
--pbzip2)
COMPRESS=pbzip2
shift
;;
--bzip2)
COMPRESS=bzip2
shift
;;
--gzip)
COMPRESS=gzip
shift
;;
--pigz)
COMPRESS=pigz
shift
;;
--zstd)
COMPRESS=zstd
shift
;;
--xz)
COMPRESS=xz
shift
;;
--lzo)
COMPRESS=lzo
shift
;;
--lz4)
COMPRESS=lz4
shift
;;
--compress)
COMPRESS=compress
shift
;;
--base64)
COMPRESS=base64
shift
;;
--gpg-encrypt)
COMPRESS=gpg
shift
;;
--gpg-asymmetric-encrypt-sign)
COMPRESS=gpg-asymmetric
shift
;;
--gpg-extra)
GPG_EXTRA="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--ssl-encrypt)
ENCRYPT=openssl
shift
;;
--ssl-passwd)
PASSWD=$2
shift 2 || { MS_Usage; exit 1; }
;;
--ssl-pass-src)
PASSWD_SRC=$2
shift 2 || { MS_Usage; exit 1; }
;;
--ssl-no-md)
OPENSSL_NO_MD=y
shift
;;
--nocomp)
COMPRESS=none
shift
;;
--complevel)
COMPRESS_LEVEL="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--threads)
THREADS="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--nochown)
OWNERSHIP=n
shift
;;
--chown)
OWNERSHIP=y
shift
;;
--notemp)
KEEP=y
shift
;;
--copy)
COPY=copy
shift
;;
--current)
CURRENT=y
KEEP=y
shift
;;
--tar-format)
TAR_FORMAT="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--tar-extra)
TAR_EXTRA="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--untar-extra)
UNTAR_EXTRA="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--target)
TARGETDIR="$2"
KEEP=y
shift 2 || { MS_Usage; exit 1; }
;;
--sign)
SIGN=y
GPG_PASSPHRASE="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--nooverwrite)
NOOVERWRITE=y
shift
;;
--needroot)
NEED_ROOT=y
shift
;;
--header)
HEADER="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--cleanup)
CLEANUP_SCRIPT="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--license)
# We need to escape all characters having a special meaning in double quotes
LICENSE=$(sed 's/\\/\\\\/g; s/"/\\\"/g; s/`/\\\`/g; s/\$/\\\$/g' "$2")
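# e.g. a license line such as:   say "hi" to `whoami` for $5
# ends up in the stub as:        say \"hi\" to \`whoami\` for \$5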
shift 2 || { MS_Usage; exit 1; }
;;
--follow)
TAR_ARGS=rvhf
DU_ARGS=-ksL
shift
;;
--noprogress)
NOPROGRESS=y
shift
;;
--nox11)
NOX11=y
shift
;;
--nowait)
NOWAIT=y
shift
;;
--nomd5)
NOMD5=y
shift
;;
--sha256)
SHA256=y
shift
;;
--nocrc)
NOCRC=y
shift
;;
--append)
APPEND=y
shift
;;
--lsm)
LSM_CMD="cat \"$2\" >> \"\$archname\""
shift 2 || { MS_Usage; exit 1; }
;;
--packaging-date)
DATE="$2"
shift 2 || { MS_Usage; exit 1; }
;;
--help-header)
HELPHEADER=`sed -e "s/'/'\\\\\''/g" $2`
shift 2 || { MS_Usage; exit 1; }
[ -n "$HELPHEADER" ] && HELPHEADER="$HELPHEADER
"
;;
--tar-quietly)
TAR_QUIETLY=y
shift
;;
--keep-umask)
KEEP_UMASK=y
shift
;;
--export-conf)
EXPORT_CONF=y
shift
;;
-q | --quiet)
QUIET=y
shift
;;
-h | --help)
MS_Usage
;;
-*)
echo Unrecognized flag : "$1"
MS_Usage
;;
*)
break
;;
esac
done
if test $# -lt 1; then
MS_Usage
else
if test -d "$1"; then
archdir="$1"
else
echo "Directory $1 does not exist." >&2
exit 1
fi
fi
archname="$2"
if test "$QUIET" = "y" || test "$TAR_QUIETLY" = "y"; then
if test "$TAR_ARGS" = "rvf"; then
TAR_ARGS="rf"
elif test "$TAR_ARGS" = "rvhf"; then
TAR_ARGS="rhf"
fi
fi
if test "$APPEND" = y; then
if test $# -lt 2; then
MS_Usage
fi
# Gather the info from the original archive
OLDENV=`sh "$archname" --dumpconf`
if test $? -ne 0; then
echo "Unable to update archive: $archname" >&2
exit 1
else
eval "$OLDENV"
OLDSKIP=`expr $SKIP + 1`
fi
else
if test "$KEEP" = n -a $# = 3; then
echo "ERROR: Making a temporary archive with no embedded command does not make sense!" >&2
echo >&2
MS_Usage
fi
# We don't want to create an absolute directory unless a target directory is defined
if test "$CURRENT" = y; then
archdirname="."
elif test x"$TARGETDIR" != x; then
archdirname="$TARGETDIR"
else
archdirname=`basename "$1"`
fi
if test $# -lt 3; then
MS_Usage
fi
LABEL="$3"
SCRIPT="$4"
test "x$SCRIPT" = x || shift 1
shift 3
SCRIPTARGS="$*"
fi
if test "$KEEP" = n -a "$CURRENT" = y; then
echo "ERROR: It is A VERY DANGEROUS IDEA to try to combine --notemp and --current." >&2
exit 1
fi
case $COMPRESS in
gzip)
GZIP_CMD="gzip -c$COMPRESS_LEVEL"
GUNZIP_CMD="gzip -cd"
;;
pigz)
GZIP_CMD="pigz -$COMPRESS_LEVEL"
if test $THREADS -ne $DEFAULT_THREADS; then # Leave as the default if threads not indicated
GZIP_CMD="$GZIP_CMD --processes $THREADS"
fi
GUNZIP_CMD="gzip -cd"
;;
zstd)
GZIP_CMD="zstd -$COMPRESS_LEVEL"
if test $THREADS -ne $DEFAULT_THREADS; then # Leave as the default if threads not indicated
GZIP_CMD="$GZIP_CMD --threads=$THREADS"
fi
GUNZIP_CMD="zstd -cd"
;;
pbzip2)
GZIP_CMD="pbzip2 -c$COMPRESS_LEVEL"
if test $THREADS -ne $DEFAULT_THREADS; then # Leave as the default if threads not indicated
GZIP_CMD="$GZIP_CMD -p$THREADS"
fi
GUNZIP_CMD="bzip2 -d"
;;
bzip2)
GZIP_CMD="bzip2 -$COMPRESS_LEVEL"
GUNZIP_CMD="bzip2 -d"
;;
xz)
GZIP_CMD="xz -c$COMPRESS_LEVEL"
# Must opt-in by specifying a value since not all versions of xz support threads
if test $THREADS -ne $DEFAULT_THREADS; then
GZIP_CMD="$GZIP_CMD --threads=$THREADS"
fi
GUNZIP_CMD="xz -d"
;;
lzo)
GZIP_CMD="lzop -c$COMPRESS_LEVEL"
GUNZIP_CMD="lzop -d"
;;
lz4)
GZIP_CMD="lz4 -c$COMPRESS_LEVEL"
GUNZIP_CMD="lz4 -d"
;;
base64)
GZIP_CMD="base64"
GUNZIP_CMD="base64 --decode -i -"
;;
gpg)
GZIP_CMD="gpg $GPG_EXTRA -ac -z$COMPRESS_LEVEL"
GUNZIP_CMD="gpg -d"
ENCRYPT="gpg"
;;
gpg-asymmetric)
GZIP_CMD="gpg $GPG_EXTRA -z$COMPRESS_LEVEL -es"
GUNZIP_CMD="gpg --yes -d"
ENCRYPT="gpg"
;;
compress)
GZIP_CMD="compress -fc"
GUNZIP_CMD="(type compress >/dev/null 2>&1 && compress -fcd || gzip -cd)"
;;
none)
GZIP_CMD="cat"
GUNZIP_CMD="cat"
;;
esac
if test x"$ENCRYPT" = x"openssl"; then
if test x"$APPEND" = x"y"; then
echo "Appending to an existing archive is not compatible with OpenSSL encryption." >&2
fi
ENCRYPT_CMD="openssl enc -aes-256-cbc -salt"
DECRYPT_CMD="openssl enc -aes-256-cbc -d"
if test x"$OPENSSL_NO_MD" != x"y"; then
ENCRYPT_CMD="$ENCRYPT_CMD -md sha256"
DECRYPT_CMD="$DECRYPT_CMD -md sha256"
fi
if test -n "$PASSWD_SRC"; then
ENCRYPT_CMD="$ENCRYPT_CMD -pass $PASSWD_SRC"
elif test -n "$PASSWD"; then
ENCRYPT_CMD="$ENCRYPT_CMD -pass pass:$PASSWD"
fi
fi
tmpfile="${TMPDIR:-/tmp}/mkself$$"
if test -f "$HEADER"; then
oldarchname="$archname"
archname="$tmpfile"
# Generate a fake header to count its lines
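# The resulting line count ends up in SKIP, which the generated stub uses to
# skip past itself to the archive data (the --append path above reads it back
# via --dumpconf and OLDSKIP for the same purpose).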
SKIP=0
. "$HEADER"
SKIP=`cat "$tmpfile" |wc -l`
# Get rid of any spaces
SKIP=`expr $SKIP`
rm -f "$tmpfile"
if test "$QUIET" = "n"; then
echo "Header is $SKIP lines long" >&2
fi
archname="$oldarchname"
else
echo "Unable to open header file: $HEADER" >&2
exit 1
fi
if test "$QUIET" = "n"; then
echo
fi
if test "$APPEND" = n; then
if test -f "$archname"; then
echo "WARNING: Overwriting existing file: $archname" >&2
fi
fi
USIZE=`du $DU_ARGS "$archdir" | awk '{print $1}'`
if test "." = "$archdirname"; then
if test "$KEEP" = n; then
archdirname="makeself-$$-`date +%Y%m%d%H%M%S`"
fi
fi
test -d "$archdir" || { echo "Error: $archdir does not exist."; rm -f "$tmpfile"; exit 1; }
if test "$QUIET" = "n"; then
echo "About to compress $USIZE KB of data..."
echo "Adding files to archive named \"$archname\"..."
fi
# See if we have GNU tar
TAR=`exec <&- 2>&-; which gtar || command -v gtar || type gtar`
test -x "$TAR" || TAR=tar
tmparch="${TMPDIR:-/tmp}/mkself$$.tar"
(
if test "$APPEND" = "y"; then
tail -n "+$OLDSKIP" "$archname" | eval "$GUNZIP_CMD" > "$tmparch"
fi
cd "$archdir"
# "Determining if a directory is empty"
# https://www.etalabs.net/sh_tricks.html
find . \
\( \
! -type d \
-o \
\( -links 2 -exec sh -c '
is_empty () (
cd "$1"
set -- .[!.]* ; test -f "$1" && return 1
set -- ..?* ; test -f "$1" && return 1
set -- * ; test -f "$1" && return 1
return 0
)
is_empty "$0"' {} \; \
\) \
\) -print \
| LC_ALL=C sort \
| sed 's/./\\&/g' \
| xargs $TAR $TAR_EXTRA --format $TAR_FORMAT -$TAR_ARGS "$tmparch"
) || {
echo "ERROR: failed to create temporary archive: $tmparch"
rm -f "$tmparch" "$tmpfile"
exit 1
}
USIZE=`du $DU_ARGS "$tmparch" | awk '{print $1}'`
eval "$GZIP_CMD" <"$tmparch" >"$tmpfile" || {
echo "ERROR: failed to create temporary file: $tmpfile"
rm -f "$tmparch" "$tmpfile"
exit 1
}
rm -f "$tmparch"
if test x"$ENCRYPT" = x"openssl"; then
echo "About to encrypt archive \"$archname\"..."
{ eval "$ENCRYPT_CMD -in $tmpfile -out ${tmpfile}.enc" && mv -f ${tmpfile}.enc $tmpfile; } || \
{ echo Aborting: could not encrypt temporary file: "$tmpfile".; rm -f "$tmpfile"; exit 1; }
fi
fsize=`cat "$tmpfile" | wc -c | tr -d " "`
# Compute the checksums
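# The checksums below are computed over the compressed payload in "$tmpfile"
# and handed to the header template (as CRCsum/MD5sum/SHAsum further down),
# so the resulting stub can verify the embedded data before extracting it.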
shasum=0000000000000000000000000000000000000000000000000000000000000000
md5sum=00000000000000000000000000000000
crcsum=0000000000
if test "$NOCRC" = y; then
if test "$QUIET" = "n"; then
echo "skipping crc at user request"
fi
else
crcsum=`CMD_ENV=xpg4 cksum < "$tmpfile" | sed -e 's/ /Z/' -e 's/ /Z/' | cut -dZ -f1`
if test "$QUIET" = "n"; then
echo "CRC: $crcsum"
fi
fi
if test "$SHA256" = y; then
SHA_PATH=`exec <&- 2>&-; which shasum || command -v shasum || type shasum`
if test -x "$SHA_PATH"; then
shasum=`eval "$SHA_PATH -a 256" < "$tmpfile" | cut -b-64`
else
SHA_PATH=`exec <&- 2>&-; which sha256sum || command -v sha256sum || type sha256sum`
shasum=`eval "$SHA_PATH" < "$tmpfile" | cut -b-64`
fi
if test "$QUIET" = "n"; then
if test -x "$SHA_PATH"; then
echo "SHA256: $shasum"
else
echo "SHA256: none, SHA command not found"
fi
fi
fi
if test "$NOMD5" = y; then
if test "$QUIET" = "n"; then
echo "Skipping md5sum at user request"
fi
else
# Try to locate a MD5 binary
OLD_PATH=$PATH
PATH=${GUESS_MD5_PATH:-"$OLD_PATH:/bin:/usr/bin:/sbin:/usr/local/ssl/bin:/usr/local/bin:/opt/openssl/bin"}
MD5_ARG=""
MD5_PATH=`exec <&- 2>&-; which md5sum || command -v md5sum || type md5sum`
test -x "$MD5_PATH" || MD5_PATH=`exec <&- 2>&-; which md5 || command -v md5 || type md5`
test -x "$MD5_PATH" || MD5_PATH=`exec <&- 2>&-; which digest || command -v digest || type digest`
PATH=$OLD_PATH
if test -x "$MD5_PATH"; then
if test `basename ${MD5_PATH}`x = digestx; then
MD5_ARG="-a md5"
fi
md5sum=`eval "$MD5_PATH $MD5_ARG" < "$tmpfile" | cut -b-32`
if test "$QUIET" = "n"; then
echo "MD5: $md5sum"
fi
else
if test "$QUIET" = "n"; then
echo "MD5: none, MD5 command not found"
fi
fi
fi
if test "$SIGN" = y; then
GPG_PATH=`exec <&- 2>&-; which gpg || command -v gpg || type gpg`
if test -x "$GPG_PATH"; then
SIGNATURE=`$GPG_PATH --pinentry-mode=loopback --batch --yes --passphrase "$GPG_PASSPHRASE" --output - --detach-sig $tmpfile | base64 | tr -d \\\\n`
if test "$QUIET" = "n"; then
echo "Signature: $SIGNATURE"
fi
else
echo "Missing gpg command" >&2
fi
fi
totalsize=0
for size in $fsize;
do
totalsize=`expr $totalsize + $size`
done
if test "$APPEND" = y; then
mv "$archname" "$archname".bak || exit
# Prepare entry for new archive
filesizes="$fsize"
CRCsum="$crcsum"
MD5sum="$md5sum"
SHAsum="$shasum"
Signature="$SIGNATURE"
# Generate the header
. "$HEADER"
# Append the new data
cat "$tmpfile" >> "$archname"
chmod +x "$archname"
rm -f "$archname".bak
if test "$QUIET" = "n"; then
echo "Self-extractable archive \"$archname\" successfully updated."
fi
else
filesizes="$fsize"
CRCsum="$crcsum"
MD5sum="$md5sum"
SHAsum="$shasum"
Signature="$SIGNATURE"
# Generate the header
. "$HEADER"
# Append the compressed tar data after the stub
if test "$QUIET" = "n"; then
echo
fi
cat "$tmpfile" >> "$archname"
chmod +x "$archname"
if test "$QUIET" = "n"; then
echo Self-extractable archive \"$archname\" successfully created.
fi
fi
rm -f "$tmpfile"