www/larbin: Remove expired port

2024-05-31 www/larbin: Abandonware, obsolete web crawler
This commit is contained in:
Rene Ladan 2024-06-11 18:06:43 +02:00
commit f96e9184f9

1
MOVED
View file

@@ -3296,3 +3296,4 @@ www/bkmrkconv||2024-06-11|Has expired: Obsolete utility, generated files are alr
www/chems||2024-06-11|Has expired: Abandonware, last release in 2009 and no activity since upstream
www/cssed||2024-06-11|Has expired: Abandonware and broken, doesn't work with Python 3.x
www/goose||2024-06-11|Has expired: Abandonware, last update in 2003 and upstream is dead
www/larbin||2024-06-11|Has expired: Abandonware, obsolete web crawler

View file

@@ -295,7 +295,6 @@
SUBDIR += kohana
SUBDIR += ladybird
SUBDIR += lagrange
SUBDIR += larbin
SUBDIR += libapreq2
SUBDIR += libdatachannel
SUBDIR += libdom

View file

@@ -1,72 +0,0 @@
# FreeBSD Ports framework Makefile for www/larbin, an HTTP web crawler.
# This is the final revision of the port before its removal from the tree.
PORTNAME= larbin
PORTVERSION= 2.6.3
PORTREVISION= 3
CATEGORIES= www
MASTER_SITES= SF
MAINTAINER= ports@FreeBSD.org
COMMENT= HTTP crawler with an easy interface
WWW= http://www.ailleret.com/
LICENSE= GPLv2
LICENSE_FILE= ${WRKSRC}/COPYING
# The port expired on 2024-05-31 and was subsequently deleted (see MOVED).
DEPRECATED= Abandonware, obsolete web crawler
EXPIRATION_DATE=2024-05-31
# bash: presumably required by the configure script (see SHEBANG_FILES) --
# TODO confirm; makedepend: invoked by the upstream build -- confirm upstream.
BUILD_DEPENDS= bash:shells/bash \
makedepend:devel/makedepend
USES= gmake shebangfix
SHEBANG_FILES= configure
GNU_CONFIGURE= yes
# Upstream build is not safe with parallel make jobs.
MAKE_JOBS_UNSAFE= yes
SUB_FILES= pkg-message
PORTDOCS= *
PLIST_FILES= bin/larbin etc/larbin.conf.sample
OPTIONS_DEFINE= DOCS
# Point the user at the upstream customisation documentation before building.
pre-everything::
@${ECHO_MSG} "====================================================================="
@${ECHO_MSG} ""
@${ECHO_MSG} "Make sure you have a look at the larbin documentation available at:"
@${ECHO_MSG} " http://larbin.sourceforge.net/custom-eng.html."
@${ECHO_MSG} ""
@${ECHO_MSG} "====================================================================="
# Rename the bundled src/utils/string.h; the post-patch target below rewrites
# all "utils/string.h" references to "utils/mystring.h" to match --
# presumably to stop it shadowing the system <string.h>/<cstring> header.
post-extract:
@(cd ${WRKSRC}/src/utils && ${MV} string.h mystring.h)
# Fix up the ancient sources in place:
#  * configure: keep the user-supplied CC/CXX (neutralise the hard-coded
#    echo lines) and delete the bare -lpthread line,
#  * src/larbin.make: append to CFLAGS/CXXFLAGS instead of clobbering them,
#    and strip -O3 and -I- from the flags,
#  * src/utils/histogram.cc: comment out (prefix with //) the line matching
#    'delete [] tab',
#  * all *.cc/*.h under src: use the renamed utils/mystring.h, replace the
#    pre-standard <iostream.h>/<string.h> includes with <iostream>/<cstring>,
#    and qualify cerr/cout/endl with std::.
post-patch:
@${REINPLACE_CMD} -e \
'/CC=/s|echo|${TRUE:T}| ; \
/CXX=/s|echo|${TRUE:T}| ; \
/-lpthread/d' ${WRKSRC}/configure
@${REINPLACE_CMD} -e \
's|CFLAGS:=-O3|CFLAGS+=| ; \
s|CXXFLAGS:=|CXXFLAGS+=| ; \
s|-O3|| ; \
s|-I-||' ${WRKSRC}/src/larbin.make
@${REINPLACE_CMD} -e \
'/delete \[\] tab/s|^|//|' ${WRKSRC}/src/utils/histogram.cc
@${FIND} ${WRKSRC}/src -name "*.cc" -or -name "*.h" | ${XARGS} \
${REINPLACE_CMD} -e \
's|utils/string.h|utils/mystring.h| ; \
s|<iostream.h>|<iostream>| ; \
s|<string.h>|<cstring>| ; \
s|\([[:blank:]]\)cerr|\1std::cerr|g ; \
s|\([[:blank:]]\)cout|\1std::cout|g ; \
s|\([[:blank:]]\)endl|\1std::endl|g'
# Install the crawler binary and a sample configuration file
# (matches PLIST_FILES above).
do-install:
${INSTALL_PROGRAM} ${WRKSRC}/larbin ${STAGEDIR}${PREFIX}/bin
${INSTALL_DATA} ${WRKSRC}/larbin.conf ${STAGEDIR}${PREFIX}/etc/larbin.conf.sample
# With the DOCS option enabled: install the whole upstream doc/ directory
# (covered by PORTDOCS= * above).
do-install-DOCS-on:
@${MKDIR} ${STAGEDIR}${DOCSDIR}
${INSTALL_DATA} ${WRKSRC}/doc/* ${STAGEDIR}${DOCSDIR}
.include <bsd.port.mk>

View file

@@ -1,2 +0,0 @@
SHA256 (larbin-2.6.3.tar.gz) = 73c41fe9c1b7e06593b7e35f91b7a3251762fb93c83cedf208beaeeaca3a6f4d
SIZE (larbin-2.6.3.tar.gz) = 132993

View file

@@ -1,31 +0,0 @@
--- adns/internal.h.orig 2002-01-02 10:44:52 UTC
+++ adns/internal.h
@@ -151,15 +151,17 @@ typedef struct allocnode {
struct allocnode *next, *back;
} allocnode;
-union maxalign {
+typedef union maxalign {
byte d[1];
struct in_addr ia;
long l;
void *p;
void (*fp)(void);
union maxalign *up;
-} data;
+} data_t;
+extern data_t data;
+
typedef struct {
void *ext;
void (*callback)(adns_query parent, adns_query child);
@@ -567,7 +569,7 @@ typedef enum {
} parsedomain_flags;
adns_status adns__parse_domain(adns_state ads, int serv, adns_query qu,
- vbuf *vb, parsedomain_flags flags,
+ vbuf *vb, adns_queryflags flags,
const byte *dgram, int dglen, int *cbyte_io, int max);
/* vb must already have been initialised; it will be reset if necessary.
* If there is truncation, vb->used will be set to 0; otherwise

View file

@@ -1,11 +0,0 @@
[
{ type: install
message: <<EOM
You must invoke larbin with the name of the configuration file, e.g:
larbin -c config-file
Documentation: %%DOCSDIR%%
Configuration file: %%PREFIX%%/etc/larbin.conf.sample
EOM
}
]

View file

@@ -1,19 +0,0 @@
Larbin is a powerful web crawler (also called [web] robot, spider...). It
is intended to fetch a large number of web pages to fill the database of a
search engine. With a network fast enough, Larbin is able to fetch more than
100 million pages on a standard PC.
Larbin was initially developed for the XYLEME project in the VERSO team at
INRIA. The goal of Larbin was to go and fetch XML pages on the web to fill
the database of an xml-oriented search engine.
The following can be done with Larbin:
o A crawler for a search engine
o A crawler for a specialized search engine (xml, images, mp3...)
o Statistics on the web (about servers or page contents)
Larbin was created by: Sebastien Ailleret
See also: http://larbin.sourceforge.net/
See also: https://www.sourceforge.net/projects/larbin