Merge branch 'braid/character-encoding' into 'main'

fix: support compressed archives and advanced character encoding

See merge request TheOneWithTheBraid/dart_pkpass!5
The one with the braid 2024-07-11 09:54:58 +00:00
commit ccd7e7e1ee
5 changed files with 149 additions and 33 deletions
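For context, here is a minimal end-to-end sketch of what the fix enables, built only on package:archive and package:charset (the same calls the updated parser below uses). The file name example.pkpass and the 'description' key lookup are illustrative, not part of this package's API:

import 'dart:convert';
import 'dart:io';
import 'dart:typed_data';

import 'package:archive/archive.dart';
import 'package:charset/charset.dart';

void main() {
  // A PkPass bundle is a plain zip archive; its entries may be deflated.
  final bytes = File('example.pkpass').readAsBytesSync();
  final archive = ZipDecoder().decodeBytes(bytes);

  // Locate pass.json and make sure the entry is decompressed before use.
  final entry = archive.files.singleWhere((file) => file.name == 'pass.json');
  entry.decompress();
  final raw = Uint8List.fromList((entry.content as List).cast<int>());

  // Sniff the character encoding instead of assuming UTF-8.
  final codec = Charset.detect(
        raw,
        defaultEncoding: utf8,
        orders: [utf8, ascii, gbk, latin1],
      ) ??
      utf8;
  final pass = json.decode(codec.decode(raw)) as Map<String, Object?>;
  print(pass['description']);
}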

.gitlab-ci.yml (new file)

@@ -0,0 +1,79 @@
variables:
  FLUTTER_VERSION: 3.22.2

image: registry.gitlab.com/theonewiththebraid/flutter-dockerimages:${FLUTTER_VERSION}-base

stages:
  - coverage
  - deploy
  - publish

workflow:
  rules:
    - if: $CI_MERGE_REQUEST_IID
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

code_analyze:
  stage: coverage
  dependencies: []
  script:
    - dart pub get
    - dart format lib test example --set-exit-if-changed
    - dart analyze
    - dart run import_sorter:main --no-comments --exit-if-changed

dart_test:
  stage: coverage
  image: dart
  dependencies: [
    code_analyze
  ]
  script:
    - dart pub get
    - dart test

code_quality:
  stage: coverage
  image: dart
  before_script:
    - dart pub global activate dart_code_metrics
  script:
    - dart pub global run dart_code_metrics:metrics analyze lib -r gitlab > code-quality-report.json
  artifacts:
    reports:
      codequality: code-quality-report.json
    # also create an actual artifact for inspection purposes
    paths:
      - code-quality-report.json

dry-run:
  stage: publish
  image: dart
  script:
    - rm -rf ./docs
    - dart pub get
    - dart pub publish --dry-run

pub-dev:
  stage: publish
  image: dart
  dependencies: [
    dry-run
  ]
  script:
    - rm -rf ./docs
    - |
      if [ -z "${PUB_DEV_CREDENTIALS}" ]; then
        echo "Missing PUB_DEV_CREDENTIALS environment variable"
        exit 1
      fi
      mkdir -p ~/.config/dart
      cp "${PUB_DEV_CREDENTIALS}" ~/.config/dart/pub-credentials.json
    - dart pub get
    - dart pub publish --force
  rules:
    - if: $CI_COMMIT_TAG


@@ -61,6 +61,8 @@ The following dependencies are used to correctly parse the PkPass file into a re
bytes.
- [`pub:barcode`](https://pub.dev/packages/barcode): Used to provide high-level access to barcode generation with the
proper encoding supported.
- [`pub:charset`](https://pub.dev/packages/charset): Used to gather the character encoding of files in the PkPass
archives.
- [`pub:crypto`](https://pub.dev/packages/crypto): Used for SHA1 signature verification as defined in the PkPass spec.
- [`pub:intl`](https://pub.dev/packages/intl): Used for localization lookup of localizable resources like Strings or
assets.
@@ -102,4 +104,4 @@ Future<int> main(List<String> args) async {
Like this project? [Buy me a Coffee](https://www.buymeacoffee.com/braid).
License : EUPL-1.2
License : [EUPL-1.2](LICENSE)


@@ -2,6 +2,7 @@ import 'dart:convert';
import 'dart:typed_data';
import 'package:archive/archive.dart';
import 'package:charset/charset.dart';
import 'package:crypto/crypto.dart';
import 'package:intl/locale.dart';
@@ -9,9 +10,6 @@ import 'package:pkpass/pkpass.dart';
import 'package:pkpass/pkpass/utils/file_matcher.dart';
import 'package:pkpass/pkpass/utils/lproj_parser.dart';
final _utf8codec = Utf8Codec();
final _jsonCodec = JsonCodec();
class PassFile {
const PassFile(this.metadata, this._folder);
@@ -26,18 +24,9 @@ class PassFile {
Map<String, String> manifest;
try {
final file = archive.files
.singleWhere((element) => element.name == 'manifest.json');
manifest = (_jsonCodec.decode(
_utf8codec.decode(
file.rawContent?.toUint8List() ?? (file.content as Uint8List),
),
) as Map)
.cast<String, String>();
} catch (e) {
throw ManifestNotFoundError();
}
final file =
archive.files.singleWhere((element) => element.name == 'manifest.json');
manifest = (json.decode(file.stringContent) as Map).cast<String, String>();
final folder = <ArchiveFile>[];
@@ -47,8 +36,7 @@ class PassFile {
final file = archive.files
.singleWhere((element) => element.name == manifestEntry.key);
final content =
file.rawContent?.toUint8List() ?? file.content as Uint8List;
final content = file.byteContent;
String hash = sha1.convert(content).toString();
@@ -67,7 +55,7 @@ class PassFile {
archive.singleWhere((element) => element.name == 'pass.json');
final PassMetadata metadata = PassMetadata.fromJson(
_jsonCodec.decode(passFile.stringContent) as Map<String, Object?>,
json.decode(passFile.stringContent) as Map<String, Object?>,
);
return PassFile(metadata, folder);
@@ -77,7 +65,7 @@ class PassFile {
final List<ArchiveFile> _folder;
Uint8List? _matchUtf8List({
Uint8List? _matchUint8ListFile({
required String name,
required Locale? locale,
required int scale,
@@ -92,27 +80,27 @@ class PassFile {
);
if (path == null) return null;
final file = _folder.singleWhere((element) => element.name == path);
final content = file.rawContent?.toUint8List() ?? file.content as Uint8List;
final content = file.byteContent;
return content;
}
Uint8List? getBackground({Locale? locale, int scale = 1}) =>
_matchUtf8List(name: 'background', locale: locale, scale: scale);
_matchUint8ListFile(name: 'background', locale: locale, scale: scale);
Uint8List? getFooter({Locale? locale, int scale = 1}) =>
_matchUtf8List(name: 'footer', locale: locale, scale: scale);
_matchUint8ListFile(name: 'footer', locale: locale, scale: scale);
Uint8List? getIcon({Locale? locale, int scale = 1}) =>
_matchUtf8List(name: 'icon', locale: locale, scale: scale);
_matchUint8ListFile(name: 'icon', locale: locale, scale: scale);
Uint8List? getLogo({Locale? locale, int scale = 1}) =>
_matchUtf8List(name: 'logo', locale: locale, scale: scale);
_matchUint8ListFile(name: 'logo', locale: locale, scale: scale);
Uint8List? getStrip({Locale? locale, int scale = 1}) =>
_matchUtf8List(name: 'strip', locale: locale, scale: scale);
_matchUint8ListFile(name: 'strip', locale: locale, scale: scale);
Uint8List? getThumbnail({Locale? locale, int scale = 1}) =>
_matchUtf8List(name: 'thumbnail', locale: locale, scale: scale);
_matchUint8ListFile(name: 'thumbnail', locale: locale, scale: scale);
Map<String, String>? getLocalizations(Locale? locale) {
final files = _folder.map((e) => e.name).toList();
@@ -129,6 +117,31 @@ class PassFile {
}
extension on ArchiveFile {
String get stringContent =>
_utf8codec.decode(rawContent?.toUint8List() ?? (content as Uint8List));
String get stringContent {
final codec = Charset.detect(
byteContent,
defaultEncoding: utf8,
orders: [
utf8,
ascii,
gbk,
latin1,
],
) ??
utf8;
return codec.decode(content);
}
Uint8List get byteContent {
decompress();
final content = this.content;
if (content is String) {
return utf8.encode(content);
} else if (content is Iterable) {
return Uint8List.fromList(content.cast<int>().toList());
} else {
return rawContent!.toUint8List();
}
}
}
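A short usage note on the extension above: decompressing before reading bytes is what makes SHA-1 verification of deflated entries work. A minimal sketch of that check, with verifyEntry as a hypothetical helper rather than anything exported by the package:

import 'package:archive/archive.dart';
import 'package:crypto/crypto.dart';

// Hypothetical helper: verify a single archive entry against the SHA-1 hex
// digest recorded for it in manifest.json, hashing the decompressed bytes
// exactly as the parser above now does.
bool verifyEntry(Archive archive, Map<String, String> manifest, String name) {
  final file = archive.files.singleWhere((element) => element.name == name);
  file.decompress(); // make sure deflated zip entries are inflated first
  final bytes = (file.content as List).cast<int>();
  return sha1.convert(bytes).toString() == manifest[name];
}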


@@ -12,7 +12,12 @@ abstract class FileMatcher {
}) {
final localized = matchLocale(files: files, name: 'logo', extension: 'png');
if (localized.isEmpty) return null;
final scaled = matchScale(files: localized, name: 'logo', extension: 'png', scale: scale);
final scaled = matchScale(
files: localized,
name: 'logo',
extension: 'png',
scale: scale,
);
final file = files.singleWhere((element) => element == scaled);
return file;
}
@@ -23,17 +28,28 @@ abstract class FileMatcher {
required String name,
required String extension,
}) {
files.sort();
files.sort((a, b) {
final aLocalized = a.startsWith(RegExp('^[a-z]+(-[a-z]+)?\\.lproj\\/'));
final bLocalized = b.startsWith(RegExp('^[a-z]+(-[a-z]+)?\\.lproj\\/'));
if (aLocalized && bLocalized) {
return a.compareTo(b);
} else if (aLocalized) {
return -1;
} else if (bLocalized) {
return 1;
} else {
return a.compareTo(b);
}
});
files = files.reversed.toList();
List<RegExp> expressions = <RegExp>[];
// adding the fallbacks
// - match just *any* language
// - match only unlocalized
// - match the five most spoken languages of the world, copied from Wikipedia
// - match just *any* language
expressions.addAll(
[
RegExp(
'^([a-z]+(-[a-z]+)?\\.lproj\\/)?$name(@\\d+x)?\\.$extension\$',
'^[a-z]+(-[a-z]+)?\\.lproj\\/$name(@\\d+x)?\\.$extension\$',
unicode: true,
caseSensitive: false,
),
@@ -44,6 +60,11 @@ abstract class FileMatcher {
caseSensitive: false,
),
),
RegExp(
'^$name(@\\d+x)?\\.$extension\$',
unicode: true,
caseSensitive: false,
),
],
);
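A small standalone sketch of what the reworked patterns match, with name and extension fixed to 'logo' and 'png'; the assumed fallback order is localized variants first, then the unlocalized file at the archive root:

void main() {
  // Same shape as the expressions above, with name = 'logo', extension = 'png'.
  final localized = RegExp(
    r'^[a-z]+(-[a-z]+)?\.lproj/logo(@\d+x)?\.png$',
    caseSensitive: false,
  );
  final unlocalized = RegExp(
    r'^logo(@\d+x)?\.png$',
    caseSensitive: false,
  );

  print(localized.hasMatch('de.lproj/logo@2x.png')); // true
  print(localized.hasMatch('logo.png')); // false
  print(unlocalized.hasMatch('logo.png')); // true
  print(unlocalized.hasMatch('zh-hans.lproj/logo.png')); // false
}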


@@ -13,6 +13,7 @@ environment:
dependencies:
  archive: ^3.3.7
  barcode: ^2.2.4
  charset: ^2.0.1
  crypto: ^3.0.3
  http: ^1.0.0
  intl: ">=0.17.0 <1.0.0"