/*
 * lib/update_util/resources/manifest.c
 * Mirror of https://github.com/DarkFlippers/unleashed-firmware
 */

#include "manifest.h"
#include <toolbox/stream/buffered_file_stream.h>
#include <toolbox/strint.h>
#include <toolbox/hex.h>
/** Stateful reader over a resource manifest file. */
struct ResourceManifestReader {
    Storage* storage; /* Storage backend used to open the stream; not owned (not freed here) */
    Stream* stream; /* Buffered file stream over the manifest file; owned */
    FuriString* linebuf; /* Reusable buffer holding the current manifest line; owned */
    ResourceManifestEntry entry; /* Single entry reused by reader_next; entry.name is owned */
};
/**
 * Allocate a manifest reader bound to the given storage backend.
 *
 * @param storage storage instance used to back the buffered file stream;
 *                ownership stays with the caller (not freed by _free).
 * @return newly allocated reader; release with resource_manifest_reader_free().
 */
ResourceManifestReader* resource_manifest_reader_alloc(Storage* storage) {
    /* malloc needs no cast in C; sizeof *ptr stays correct if the type is renamed */
    ResourceManifestReader* resource_manifest = malloc(sizeof(*resource_manifest));
    resource_manifest->storage = storage;
    resource_manifest->stream = buffered_file_stream_alloc(resource_manifest->storage);
    memset(&resource_manifest->entry, 0, sizeof(resource_manifest->entry));
    resource_manifest->entry.name = furi_string_alloc();
    resource_manifest->linebuf = furi_string_alloc();
    return resource_manifest;
}
/**
 * Release a reader allocated with resource_manifest_reader_alloc().
 * Closes and frees the stream and all owned strings; does NOT free
 * the Storage instance passed at allocation time.
 */
void resource_manifest_reader_free(ResourceManifestReader* resource_manifest) {
    furi_assert(resource_manifest);
    /* Tear down the stream first, then the owned string buffers */
    buffered_file_stream_close(resource_manifest->stream);
    stream_free(resource_manifest->stream);
    furi_string_free(resource_manifest->entry.name);
    furi_string_free(resource_manifest->linebuf);
    free(resource_manifest);
}
/**
 * Open an existing manifest file for reading.
 *
 * @param resource_manifest reader instance
 * @param filename path of the manifest file to open
 * @return true if the file was opened successfully
 */
bool resource_manifest_reader_open(ResourceManifestReader* resource_manifest, const char* filename) {
    furi_assert(resource_manifest);
    const bool opened = buffered_file_stream_open(
        resource_manifest->stream, filename, FSAM_READ, FSOM_OPEN_EXISTING);
    return opened;
}
/* Read the next manifest entry. Entries have the format:
 * F:<hash>:<size>:<name>   (file: md5 hash, byte size, path)
 * D:<name>                 (directory)
 * V:<value> / T:<value>    (version / timestamp, returned as plain key-value)
 *
 * Returns a pointer to the reader's internal entry — valid only until the
 * next call — or NULL at end of stream. NOTE: a file entry whose size field
 * fails to parse also returns NULL (the `break` below exits the read loop),
 * whereas an invalid hash merely skips the line.
 */
ResourceManifestEntry* resource_manifest_reader_next(ResourceManifestReader* resource_manifest) {
    furi_assert(resource_manifest);
    /* Reset the reused entry before populating it from the next line */
    furi_string_reset(resource_manifest->entry.name);
    resource_manifest->entry.type = ResourceManifestEntryTypeUnknown;
    resource_manifest->entry.size = 0;
    memset(resource_manifest->entry.hash, 0, sizeof(resource_manifest->entry.hash));
    do {
        if(!stream_read_line(resource_manifest->stream, resource_manifest->linebuf)) {
            return NULL; /* End of stream */
        }
        /* Trim end of line */
        furi_string_trim(resource_manifest->linebuf);
        /* First character of the line selects the entry type */
        char type_code = furi_string_get_char(resource_manifest->linebuf, 0);
        switch(type_code) {
        case 'V':
            resource_manifest->entry.type = ResourceManifestEntryTypeVersion;
            break;
        case 'T':
            resource_manifest->entry.type = ResourceManifestEntryTypeTimestamp;
            break;
        case 'F':
            resource_manifest->entry.type = ResourceManifestEntryTypeFile;
            break;
        case 'D':
            resource_manifest->entry.type = ResourceManifestEntryTypeDirectory;
            break;
        default: /* Skip lines with an unrecognized type code */
            continue;
        };
        if(resource_manifest->entry.type == ResourceManifestEntryTypeFile) {
            /* Parse file entry
               F:<hash>:<size>:<name> */
            /* Remove entry type code and following ':' (2 chars) */
            furi_string_right(resource_manifest->linebuf, 2);
            /* Hash must be exactly 2 hex chars per hash byte, ':'-terminated */
            if(furi_string_search_char(resource_manifest->linebuf, ':') !=
               sizeof(resource_manifest->entry.hash) * 2) {
                /* Invalid hash */
                continue;
            }
            /* Read hash */
            hex_chars_to_uint8(
                furi_string_get_cstr(resource_manifest->linebuf), resource_manifest->entry.hash);
            /* Remove hash and its trailing ':' */
            furi_string_right(
                resource_manifest->linebuf, sizeof(resource_manifest->entry.hash) * 2 + 1);
            /* Parse decimal size; on failure `break` exits the loop -> returns NULL */
            if(strint_to_uint32(
                   furi_string_get_cstr(resource_manifest->linebuf),
                   NULL,
                   &resource_manifest->entry.size,
                   10) != StrintParseNoError)
                break;
            /* Remove size; the remainder of the line is the file name */
            size_t offs = furi_string_search_char(resource_manifest->linebuf, ':');
            furi_string_right(resource_manifest->linebuf, offs + 1);
            furi_string_set(resource_manifest->entry.name, resource_manifest->linebuf);
        } else { //-V547
            /* Everything else is plain key value. Parse version, timestamp or directory entry
               <Type>:<Value> */
            /* Remove entry type code and following ':' (2 chars) */
            furi_string_right(resource_manifest->linebuf, 2);
            furi_string_set(resource_manifest->entry.name, resource_manifest->linebuf);
        }
        return &resource_manifest->entry;
    } while(true);
    /* Reached only via `break` on a size-parse error */
    return NULL;
}
/* Step the reader one entry backwards and return that entry.
 *
 * Works by seeking backwards through '\n' characters: normally two jumps are
 * needed (past the end of the current line, then past the end of the previous
 * one), after which reader_next re-reads the previous line. Special cases:
 * at EOF only one jump is needed, and the first line requires an explicit
 * seek to offset 0 because there is no '\n' before it.
 *
 * Returns the previous entry, or NULL if there is none (position is then
 * restored to where it was before the call).
 */
ResourceManifestEntry*
    resource_manifest_reader_previous(ResourceManifestReader* resource_manifest) {
    furi_assert(resource_manifest);
    // Snapshot position for rollback
    const size_t previous_position = stream_tell(resource_manifest->stream);
    // We need to jump 2 lines back
    size_t jumps = 2;
    // Special case: end of the file.
    const bool was_eof = stream_eof(resource_manifest->stream);
    if(was_eof) {
        jumps = 1;
    }
    while(jumps) {
        if(!stream_seek_to_char(resource_manifest->stream, '\n', StreamDirectionBackward)) {
            break;
        }
        // Only count a jump once we have moved past the newline that ends
        // the line we started on
        if(stream_tell(resource_manifest->stream) < (previous_position - 1)) {
            jumps--;
        }
    }
    // Special case: first line. Force seek to zero
    if(jumps == 1) {
        jumps = 0;
        stream_seek(resource_manifest->stream, 0, StreamOffsetFromStart);
    }
    if(jumps == 0) {
        // Re-read the line we seeked back to
        ResourceManifestEntry* entry = resource_manifest_reader_next(resource_manifest);
        // Special case: was end of the file, prevent loop
        if(was_eof) {
            stream_seek(resource_manifest->stream, -1, StreamOffsetFromCurrent);
        }
        return entry;
    } else {
        // Could not go back far enough: restore position and report no entry
        stream_seek(resource_manifest->stream, previous_position, StreamOffsetFromStart);
        return NULL;
    }
}
/*
 * History note (commit 2024-06-30, "Updater: resource compression", PR #3716):
 * resources are now shipped as heatshrink-compressed .hs.tar archives; the
 * toolbox compress/tar APIs gained streaming support and the updater performs
 * single-pass decompression with reworked progress reporting.
 */
/**
 * Rewind the reader to the beginning of the manifest so iteration
 * can start over from the first entry.
 *
 * @return true if the seek to the start of the stream succeeded
 */
bool resource_manifest_rewind(ResourceManifestReader* resource_manifest) {
    furi_assert(resource_manifest);
    Stream* stream = resource_manifest->stream;
    return stream_seek(stream, 0, StreamOffsetFromStart);
}