mirror of
https://github.com/vincentmli/bpfire.git
synced 2026-04-26 02:42:58 +02:00
Merge branch 'next' of ssh://git.ipfire.org/pub/git/ipfire-2.x into next-suricata
This commit is contained in:
@@ -1,89 +0,0 @@
|
||||
#!/usr/bin/perl
|
||||
#
|
||||
# Converter for MaxMind CSV database to binary, for xt_geoip
|
||||
# Copyright © Jan Engelhardt, 2008-2011
|
||||
#
|
||||
use Getopt::Long;
|
||||
use IO::Handle;
|
||||
use Text::CSV_XS; # or trade for Text::CSV
|
||||
use strict;
|
||||
|
||||
# Build the CSV parser, read options, sanity-check the target
# directory and hand the parsed data to the dumper.
my $csv = Text::CSV_XS->new({
	allow_whitespace => 1,
	binary           => 1,
	eol              => $/,
}); # or Text::CSV
my $target_dir = ".";

Getopt::Long::Configure(qw(bundling));
GetOptions(
	# -D <dir>: where the per-country files are written.
	"D=s" => \$target_dir,
);

if (!-d $target_dir) {
	print STDERR "Target directory $target_dir does not exist.\n";
	exit 1;
}

# Little-endian output subtree; create it on first run.
my $le_dir = "$target_dir/LE";
if (!-e $le_dir && !mkdir($le_dir)) {
	print STDERR "Could not mkdir $le_dir: $!\n";
	exit 1;
}

# &-call is deliberate: "dump" is also a Perl builtin.
&dump(&collect());
|
||||
|
||||
# Read MaxMind CSV rows from the files named on the command line
# (via *ARGV) and group the IPv4 ranges by two-letter country code.
# Returns a hashref: iso_code => { name, pool_v4, pool_v6 }.
sub collect
{
	my %country;

	while (my $row = $csv->getline(*ARGV)) {
		# Columns: [2],[3] numeric range start/end,
		# [4] ISO code, [5] country name.
		my $code = $row->[4];
		if (!defined $country{$code}) {
			$country{$code} = {
				name    => $row->[5],
				pool_v4 => [],
				pool_v6 => [],
			};
		}

		push(@{$country{$code}{pool_v4}}, [$row->[2], $row->[3]]);

		# Cheap progress indicator; $. is the input line counter.
		print STDERR "\r\e[2K$. entries" if $. % 4096 == 0;
	}

	print STDERR "\r\e[2K$. entries total\n";
	return \%country;
}
|
||||
|
||||
# Emit one binary database file per collected country, in stable
# (sorted) ISO-code order.  Named "dump" after the output step;
# callers must use &dump() to bypass the Perl builtin of that name.
sub dump
{
	my ($country) = @_;

	for my $iso_code (sort keys %$country) {
		dump_one($iso_code, $country->{$iso_code});
	}
}
|
||||
|
||||
# Write one country's IPv4 ranges to $target_dir/LE/<CC>.iv4 as
# consecutive pairs of packed little-endian 32-bit values
# (range start, range end).  Exits the program on any I/O failure.
sub dump_one
{
	my($iso_code, $country) = @_;

	printf "%5u IPv4 ranges for %s %s\n",
		scalar(@{$country->{pool_v4}}),
		$iso_code, $country->{name};

	my $file = "$target_dir/LE/".uc($iso_code).".iv4";
	my $fh_le;
	# Three-argument open: the mode can never leak in from $file.
	if (!open($fh_le, ">", $file)) {
		print STDERR "Error opening $file: $!\n";
		exit 1;
	}
	# The payload is raw pack() output — keep the handle binary so
	# no layer performs newline translation.
	binmode($fh_le);

	foreach my $range (@{$country->{pool_v4}}) {
		print $fh_le pack("VV", $range->[0], $range->[1]);
	}

	# Buffered write errors (e.g. a full disk) only surface at close.
	if (!close($fh_le)) {
		print STDERR "Error writing $file: $!\n";
		exit 1;
	}
}
|
||||
@@ -2,7 +2,7 @@
|
||||
###############################################################################
|
||||
# #
|
||||
# IPFire.org - A linux based firewall #
|
||||
# Copyright (C) 2014 IPFire Development Team <info@ipfire.org> #
|
||||
# Copyright (C) 2019 IPFire Development Team <info@ipfire.org> #
|
||||
# #
|
||||
# This program is free software: you can redistribute it and/or modify #
|
||||
# it under the terms of the GNU General Public License as published by #
|
||||
@@ -24,13 +24,10 @@ TMP_FILE=$(mktemp -p $TMP_PATH)
|
||||
|
||||
SCRIPT_PATH=/usr/local/bin
|
||||
DEST_PATH=/usr/share/xt_geoip
|
||||
DB_PATH=/var/lib/GeoIP
|
||||
|
||||
DL_URL=https://geolite.maxmind.com/download/geoip/database
|
||||
DL_FILE=GeoIPCountryCSV.zip
|
||||
|
||||
CSV_FILE=GeoIPCountryWhois.csv
|
||||
|
||||
ARCH=LE
|
||||
DL_URL=http://geolite.maxmind.com/download/geoip/database
|
||||
DL_FILE=GeoLite2-Country-CSV.zip
|
||||
|
||||
eval $(/usr/local/bin/readhash /var/ipfire/proxy/settings)
|
||||
|
||||
@@ -57,24 +54,40 @@ function download() {
|
||||
# Get the latest GeoIP database from server.
|
||||
wget $DL_URL/$DL_FILE $PROXYSETTINGS -O $TMP_FILE
|
||||
|
||||
# Extract files.
|
||||
# Extract files to database path.
|
||||
unzip $TMP_FILE -d $TMP_PATH
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
# Copy the freshly extracted CSV databases into $DB_PATH, creating
# $DB_PATH and $DEST_PATH first if they are missing.
# Returns 0 on success, 1 if the copy fails.
function install() {
	echo "Install CSV database..."

	# Check if the database dir exists.
	if [ ! -e "$DB_PATH" ]; then
		mkdir -p "$DB_PATH" &>/dev/null
	fi

	# Check if the directory for binary databases exists.
	if [ ! -e "$DEST_PATH" ]; then
		mkdir -p "$DEST_PATH" &>/dev/null
	fi

	# Install CSV databases.  The glob itself must stay unquoted;
	# only the path prefixes are quoted against whitespace.
	if ! cp -af "$TMP_PATH"/*/* "$DB_PATH" &>/dev/null; then
		echo "Could not copy files. Aborting." >&2
		return 1
	fi

	return 0
}
|
||||
|
||||
function build() {
|
||||
echo "Convert database..."
|
||||
|
||||
# Check if the csv file exists.
|
||||
if [ ! -e $TMP_PATH/$CSV_FILE ]; then
|
||||
echo "$TMP_PATH/$CSV_FILE not found. Exiting."
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Run script to convert the CSV file into several xtables
|
||||
# compatible binary files.
|
||||
if ! $SCRIPT_PATH/xt_geoip_build $TMP_PATH/$CSV_FILE -D $TMP_PATH; then
|
||||
if ! $SCRIPT_PATH/xt_geoip_build -S $DB_PATH -D $DEST_PATH; then
|
||||
echo "Could not convert ruleset. Aborting." >&2
|
||||
return 1
|
||||
fi
|
||||
@@ -82,23 +95,6 @@ function build() {
|
||||
return 0
|
||||
}
|
||||
|
||||
# Install the converted binary databases: copy the $ARCH subtree of
# the temporary build area into $DEST_PATH, creating it on demand.
# Returns 0 on success, 1 if the copy fails.
function install() {
	echo "Install databases..."

	# Check if our destination exist.
	if [ ! -e "$DEST_PATH" ]; then
		mkdir -p "$DEST_PATH" &>/dev/null
	fi

	# Install databases.  Paths are quoted so whitespace in the
	# temporary directory name cannot split the arguments.
	if ! cp -af "$TMP_PATH/$ARCH" "$DEST_PATH" &>/dev/null; then
		echo "Could not copy files. Aborting." >&2
		return 1
	fi

	return 0
}
|
||||
|
||||
function cleanup() {
|
||||
echo "Cleaning up temporary files..."
|
||||
if ! rm -rf $TMP_PATH &>/dev/null; then
|
||||
@@ -113,23 +109,18 @@ function main() {
|
||||
# Download ruleset.
|
||||
download || exit $?
|
||||
|
||||
# Convert the ruleset.
|
||||
if ! build; then
|
||||
# Do cleanup.
|
||||
cleanup || exit $?
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Install the converted ruleset.
|
||||
if ! install; then
|
||||
# Do cleanup.
|
||||
cleanup || exit $?
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Finaly remove temporary files.
|
||||
# Remove temporary files.
|
||||
cleanup || exit $?
|
||||
|
||||
# Convert the ruleset.
|
||||
build || exit $?
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user