hosaka

joined 2 years ago
[–] hosaka 1 points 2 weeks ago

Zig

const std = @import("std");
const List = std.ArrayList;
const Map = std.AutoHashMap;

const tokenizeScalar = std.mem.tokenizeScalar;
const splitScalar = std.mem.splitScalar;
const parseInt = std.fmt.parseInt;
const print = std.debug.print;
const contains = std.mem.containsAtLeast;
const eql = std.mem.eql;

var gpa = std.heap.GeneralPurposeAllocator(.{}){};
const alloc = gpa.allocator();

const Answer = struct {
    middle_sum: i32,
    reordered_sum: i32,
};

pub fn solve(input: []const u8) !Answer {
    var rows = splitScalar(u8, input, '\n');

    // key is a page number and value is a
    // list of pages to be printed before it
    var rules = Map(i32, List(i32)).init(alloc);
    var pages = List([]i32).init(alloc);
    defer {
        var iter = rules.iterator();
        while (iter.next()) |rule| {
            rule.value_ptr.deinit();
        }
        rules.deinit();
        pages.deinit();
    }

    var parse_rules = true;
    while (rows.next()) |row| {
        if (eql(u8, row, "")) {
            parse_rules = false;
            continue;
        }

        if (parse_rules) {
            var rule_pair = tokenizeScalar(u8, row, '|');
            const rule = try rules.getOrPut(try parseInt(i32, rule_pair.next().?, 10));
            if (!rule.found_existing) {
                rule.value_ptr.* = List(i32).init(alloc);
            }
            try rule.value_ptr.*.append(try parseInt(i32, rule_pair.next().?, 10));
        } else {
            var page = List(i32).init(alloc);
            var page_list = tokenizeScalar(u8, row, ',');
            while (page_list.next()) |list| {
                try page.append(try parseInt(i32, list, 10));
            }
            try pages.append(try page.toOwnedSlice());
        }
    }

    var middle_sum: i32 = 0;
    var reordered_sum: i32 = 0;

    var wrong_order = false;
    for (pages.items) |page| {
        var index: usize = page.len - 1;
        while (index > 0) : (index -= 1) {
            var page_rule = rules.get(page[index]) orelse continue;

            // check the rest of the pages
            var remaining: usize = 0;
            while (remaining < page[0..index].len) {
                if (contains(i32, page_rule.items, 1, &[_]i32{page[remaining]})) {
                    // re-order the wrong page
                    const element = page[remaining];
                    page[remaining] = page[index];
                    page[index] = element;
                    wrong_order = true;

                    if (rules.get(element)) |next_rule| {
                        page_rule = next_rule;
                    }

                    continue;
                }
                remaining += 1;
            }
        }
        if (wrong_order) {
            reordered_sum += page[(page.len - 1) / 2];
            wrong_order = false;
        } else {
            // middle page number
            middle_sum += page[(page.len - 1) / 2];
        }
    }
    return Answer{ .middle_sum = middle_sum, .reordered_sum = reordered_sum };
}

pub fn main() !void {
    const answer = try solve(@embedFile("input.txt"));
    print("Part 1: {d}\n", .{answer.middle_sum});
    print("Part 2: {d}\n", .{answer.reordered_sum});
}

test "test input" {
    const answer = try solve(@embedFile("test.txt"));
    try std.testing.expectEqual(143, answer.middle_sum);
    try std.testing.expectEqual(123, answer.reordered_sum);
}

[–] hosaka 1 points 2 weeks ago

Zig

const std = @import("std");
const List = std.ArrayList;

const tokenizeScalar = std.mem.tokenizeScalar;
const parseInt = std.fmt.parseInt;
const print = std.debug.print;
const eql = std.mem.eql;

var gpa = std.heap.GeneralPurposeAllocator(.{}){};
const alloc = gpa.allocator();

const Point = struct {
    x: isize,
    y: isize,
    fn add(self: *const Point, point: *const Point) Point {
        return Point{ .x = self.x + point.x, .y = self.y + point.y };
    }
};

// note: i have no idea how to use this or if it's even possible
// const DirectionType = enum(u8) { Up, Down, Left, Right, UpLeft, UpRight, DownLeft, DownRight };
// const Direction = union(DirectionType) {
//     up: Point = .{ .x = 0, .y = 0 },
// };

const AllDirections = [_]Point{
    .{ .x = 0, .y = -1 }, // up
    .{ .x = 0, .y = 1 }, // down
    .{ .x = -1, .y = 0 }, // left
    .{ .x = 1, .y = 0 }, // right
    .{ .x = -1, .y = -1 }, // up left
    .{ .x = 1, .y = -1 }, // up right
    .{ .x = -1, .y = 1 }, // down left
    .{ .x = 1, .y = 1 }, // down right
};

const Answer = struct {
    xmas: u32,
    mas: u32,
};

pub fn searchXmas(letters: List([]const u8), search_char: u8, position: Point, direction: Point) u32 {
    const current_char = getChar(letters, position);
    if (current_char == search_char) {
        const next = position.add(&direction);
        if (current_char == 'M') {
            return searchXmas(letters, 'A', next, direction);
        } else if (current_char == 'A') {
            return searchXmas(letters, 'S', next, direction);
        } else if (current_char == 'S') {
            return 1; // found all letters
        }
    }
    return 0;
}

pub fn countXmas(letters: List([]const u8), starts: List(Point)) u32 {
    var counter: u32 = 0;
    for (starts.items) |start| {
        for (AllDirections) |direction| {
            const next = start.add(&direction);
            counter += searchXmas(letters, 'M', next, direction);
        }
    }
    return counter;
}

pub fn countMas(letters: List([]const u8), starts: List(Point)) u32 {
    var counter: u32 = 0;
    for (starts.items) |start| {
        const a_char = getChar(letters, start) orelse continue;
        const top_left_char = getChar(letters, start.add(&AllDirections[4])) orelse continue;
        const down_right_char = getChar(letters, start.add(&AllDirections[7])) orelse continue;
        const top_right_char = getChar(letters, start.add(&AllDirections[5])) orelse continue;
        const down_left_char = getChar(letters, start.add(&AllDirections[6])) orelse continue;

        const tldr = [3]u8{ top_left_char, a_char, down_right_char };
        const trdl = [3]u8{ top_right_char, a_char, down_left_char };
        if ((eql(u8, &tldr, "MAS") or eql(u8, &tldr, "SAM")) and (eql(u8, &trdl, "MAS") or eql(u8, &trdl, "SAM"))) {
            counter += 1;
        }
    }
    return counter;
}

pub fn getChar(letters: List([]const u8), point: Point) ?u8 {
    if (0 > point.x or point.x >= letters.items.len) {
        return null;
    }
    const row = @as(usize, @intCast(point.x));

    if (0 > point.y or point.y >= letters.items[row].len) {
        return null;
    }
    const col = @as(usize, @intCast(point.y));
    return letters.items[row][col];
}

pub fn solve(input: []const u8) !Answer {
    var rows = tokenizeScalar(u8, input, '\n');

    var letters = List([]const u8).init(alloc);
    defer letters.deinit();
    var x_starts = List(Point).init(alloc);
    defer x_starts.deinit();
    var a_starts = List(Point).init(alloc);
    defer a_starts.deinit();

    var x: usize = 0;
    while (rows.next()) |row| {
        try letters.append(row);
        for (row, 0..) |letter, y| {
            if (letter == 'X') {
                try x_starts.append(.{ .x = @intCast(x), .y = @intCast(y) });
            } else if (letter == 'A') {
                try a_starts.append(.{ .x = @intCast(x), .y = @intCast(y) });
            }
        }
        x += 1;
    }

    // PART 1
    const xmas = countXmas(letters, x_starts);

    // PART 2
    const mas = countMas(letters, a_starts);

    return Answer{ .xmas = xmas, .mas = mas };
}

pub fn main() !void {
    const answer = try solve(@embedFile("input.txt"));
    print("Part 1: {d}\n", .{answer.xmas});
    print("Part 2: {d}\n", .{answer.mas});
}

test "test input" {
    const answer = try solve(@embedFile("test.txt"));
    try std.testing.expectEqual(18, answer.xmas);
}

[–] hosaka 1 points 2 weeks ago

Zig

const std = @import("std");
const List = std.ArrayList;

const splitScalar = std.mem.splitScalar;
const parseInt = std.fmt.parseInt;
const print = std.debug.print;
const concat = std.mem.concat;

var gpa = std.heap.GeneralPurposeAllocator(.{}){};
const alloc = gpa.allocator();

const Answer = struct {
    safe: u32,
    tolerated: u32,
};

pub fn isSafe(levels: []i32) bool {
    if (levels.len == 0) {
        return false;
    }
    // slide window in pairs, advancing by one
    var it = std.mem.window(i32, levels, 2, 1);
    const first = it.first();
    const decreasing = first[0] - first[1] > 0;
    it.reset(); // rewind the iterator

    while (it.next()) |slice| {
        const lhs: i32 = slice[0];
        const rhs: i32 = slice[1];
        if (decreasing) {
            if (lhs <= rhs) return false;
            if (lhs - rhs < 1 or lhs - rhs > 3) return false;
        } else {
            if (rhs <= lhs) return false;
            if (rhs - lhs < 1 or rhs - lhs > 3) return false;
        }
    }
    return true;
}

pub fn solve(input: []const u8) !Answer {
    var rows = splitScalar(u8, input, '\n');

    // PART 1

    // determine how many reports are safe
    var safe_reports: u32 = 0;
    var tolerated_reports: u32 = 0;
    var unsafe_reports = List([]i32).init(alloc);
    defer unsafe_reports.deinit();

    while (rows.next()) |row| {
        var levels = splitScalar(u8, row, ' ');

        var report = List(i32).init(alloc);
        defer report.deinit();

        while (levels.next()) |level| {
            const value = parseInt(i32, level, 10) catch continue;
            report.append(value) catch continue;
        }

        if (isSafe(report.items)) {
            safe_reports += 1;
        } else {
            try unsafe_reports.append(try alloc.dupe(i32, report.items));
        }
    }

    // PART 2

    // determine how many unsafe reports can be tolerated
    for (unsafe_reports.items) |report| {
        var index: usize = 0;
        while (index < report.len) : (index += 1) {
            // mutate report by removing one level
            const mutated_report = concat(
                alloc,
                i32,
                &[_][]const i32{ report[0..index], report[index + 1 ..] },
            ) catch report;
            defer alloc.free(mutated_report);

            if (isSafe(mutated_report)) {
                tolerated_reports += 1;
                break;
            }
        }
    }

    return Answer{ .safe = safe_reports, .tolerated = safe_reports + tolerated_reports };
}

pub fn main() !void {
    const answer = try solve(@embedFile("input.txt"));
    print("Part 1: {d}\n", .{answer.safe});
    print("Part 2: {d}\n", .{answer.tolerated});
}

test "test input" {
    const answer = try solve(@embedFile("test.txt"));
    try std.testing.expectEqual(2, answer.safe);
    try std.testing.expectEqual(4, answer.tolerated);
}

[–] hosaka 1 points 2 weeks ago

Zig

const std = @import("std");
const List = std.ArrayList;
const Map = std.AutoHashMap;

const splitSeq = std.mem.splitSequence;
const splitScalar = std.mem.splitScalar;
const parseInt = std.fmt.parseInt;
const print = std.debug.print;
const sort = std.sort.block;

var gpa = std.heap.GeneralPurposeAllocator(.{}){};
const alloc = gpa.allocator();

const Answer = struct {
    distance: u32,
    similarity: u32,
};

fn lessThan(_: void, lhs: []const u8, rhs: []const u8) bool {
    return std.mem.lessThan(u8, lhs, rhs);
}

pub fn solve(input: []const u8) !Answer {
    var rows = splitScalar(u8, input, '\n');
    var left_list = List([]const u8).init(alloc);
    defer left_list.deinit();
    var right_list = List([]const u8).init(alloc);
    defer right_list.deinit();

    // PART 1

    // split the rows into two lists
    while (rows.next()) |row| {
        var sides = splitSeq(u8, row, "   ");
        try left_list.append(sides.next() orelse break);
        try right_list.append(sides.next() orelse break);
    }
    _ = left_list.pop(); // drop the empty entry left by the trailing newline

    // sort both lists; lexicographic sort of the raw strings matches numeric
    // order as long as all numbers have the same number of digits
    sort([]const u8, left_list.items, {}, lessThan);
    sort([]const u8, right_list.items, {}, lessThan);

    var distance: u32 = 0;
    for (left_list.items, right_list.items) |left, right| {
        distance += @abs(try parseInt(i32, left, 10) - try parseInt(i32, right, 10));
    }

    // PART 2
    var right_scores = Map(i32, u32).init(alloc);
    defer right_scores.deinit();

    // count number of item appearances in the right list
    for (right_list.items) |item| {
        const value = try parseInt(i32, item, 10);
        const result = try right_scores.getOrPut(value);
        if (!result.found_existing) {
            result.value_ptr.* = 1;
        } else {
            result.value_ptr.* += 1;
        }
    }

    // sum up similarity between items in left list and right list scores
    var similarity: u32 = 0;
    for (left_list.items) |item| {
        const value = try parseInt(i32, item, 10);
        const result = right_scores.get(value) orelse 0;
        similarity += @as(u32, @intCast(value)) * result;
    }
    return Answer{ .distance = distance, .similarity = similarity };
}

pub fn main() !void {
    const answer = try solve(@embedFile("input.txt"));
    print("Part 1: {d}\n", .{answer.distance});
    print("Part 2: {d}\n", .{answer.similarity});
}

test "test input" {
    const answer = try solve(@embedFile("test.txt"));
    try std.testing.expectEqual(answer.distance, 11);
    try std.testing.expectEqual(answer.similarity, 31);
}

[–] hosaka 1 points 2 weeks ago

Been trying to pick up Zig, so it made sense to try the AoC with it as well.

[–] hosaka 1 points 2 months ago (1 children)

If only it wasn't paywalled

[–] hosaka 2 points 2 months ago (2 children)

HTMX is great, but I don't think it's what OP needs, since the input and desired output aren't hypermedia in the first place.

[–] hosaka 3 points 2 months ago

Honestly, I'm not sure about Swagger; I've only ever used swagger-ui to show the API docs on a webpage. OpenAPI as a standard and openapi-generator are not abandoned and are quite active. I'll give you an example of how I use it.

I have a FastAPI server in Python that defines some endpoints and the data models it can work with; it exports an openapi.json definition. I also have a common schemas library defined with pydantic that exports its own openapi.json (Python was chosen to make it easier for other team members to make quick changes). This schemas library is also imported in the FastAPI app, so basically only the data models are shared.
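
As a rough sketch (model and endpoint names are made up, not our actual code), the shared pydantic models plus a FastAPI app that exports its openapi.json might look like this:

import json

from fastapi import FastAPI
from pydantic import BaseModel


# hypothetical shared model; in our setup these live in the separate pydantic schemas package
class Job(BaseModel):
    id: int
    name: str


app = FastAPI()


@app.post("/jobs")
def create_job(job: Job) -> Job:
    return job


# FastAPI serves this at /openapi.json, but it can also be dumped to a file
# that openapi-generator consumes
if __name__ == "__main__":
    with open("openapi.json", "w") as f:
        json.dump(app.openapi(), f, indent=2)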

I use the FastAPI openapi.json to generate C++ code in one application (the end-user app) using openapi-generator-cli; serialization/deserialization is handled by the generated code, and since the pydantic schemas are a dependency of the FastAPI server, both the endpoints and the data models get generated. The pydantic openapi.json is also used by our frontend, written in TypeScript, to generate data models only, since the frontend doesn't need to call FastAPI directly (though it has the option to in the future by generating from the FastAPI openapi.json instead).
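
The generation step itself is just one openapi-generator-cli call per target; it's wrapped in Python here only to keep the example self-contained, and the generator names and output paths are illustrative, not our actual setup:

import subprocess

# e.g. a C++ client for the end-user app and TypeScript models for the frontend
for generator, output in [("cpp-restsdk", "gen/cpp"), ("typescript-fetch", "gen/ts")]:
    subprocess.run(
        ["openapi-generator-cli", "generate", "-i", "openapi.json", "-g", generator, "-o", output],
        check=True,
    )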

This ensures that we're using the same schema across all codebases. When I make changes to the schema, the code gets re-generated and included in the new C++/web app builds. There are multiple ways to go about versioning, but for a data-only schema I'd just keep it backwards compatible forever (by adding new props as optional fields rather than required ones, and slowly deprecating/removing props that are no longer used).
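
For instance (hypothetical model again), a backwards-compatible change only ever adds optional fields with defaults, so previously generated clients keep working:

from typing import Optional

from pydantic import BaseModel


class Job(BaseModel):
    id: int
    name: str
    # added in a later revision: optional with a default, so older generated
    # clients that neither send nor expect this field stay compatible
    priority: Optional[int] = None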

I found this to be more convoluted than just using something like gRPC/Protobuf (which can also be serialized from JSON); I've used that before and it was great. But for other devs who only need to change a few lines of Python and don't want to deal with the protobuf compiler, it's a more frictionless solution, at the cost of more moving parts and some CI/CD setup on my side.

[–] hosaka 6 points 2 months ago (12 children)

Use an OpenAPI schema. You can define data models and endpoints, or just the models; I do this at work. Then generate your code using openapi-generator.
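
For example, with pydantic (hypothetical model; for openapi-generator you'd export a full OpenAPI document, e.g. via FastAPI, rather than a single model's schema):

import json

from pydantic import BaseModel


# define your data shapes once...
class Sensor(BaseModel):
    id: int
    reading: float


# ...and export a schema that code generators can work from
print(json.dumps(Sensor.model_json_schema(), indent=2))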

[–] hosaka 4 points 3 months ago (1 children)

Double Commander is also worth mentioning

[–] hosaka 7 points 4 months ago* (last edited 4 months ago) (1 children)

I think it's not aimed at protecting against potential attacks; it's aimed at a developer using/writing modules of code. It's not a security guard.

[–] hosaka 1 points 4 months ago

Glad you figured it out! A separate network for a set of services that need to talk to each other is the way I do it for my self-hosted tools. If you want some more ideas on setting up the *arr apps using Docker Compose, this is my current setup: https://github.com/hosaka/selfhosted/blob/main/servarr.yml
