--- al.c 2002/10/14 09:17:21 1.2
+++ al.c 2002/10/14 12:32:16 1.3
@@ -12,6 +12,8 @@
#define NEXT(n,l) ((n)->l.next)
#define PREV(n,l) ((n)->l.prev)
+#define ISEMPTY(q,l) (HEAD(q,l) == NULL)
+
#define LISTINIT(q,l) \
do { \
HEAD(q,l) = NULL; \
@@ -82,7 +84,41 @@
PREV(NEXT(n,l),l) = PREV(n,l); \
} while (0)
-#define ISEMPTY(q,l) (HEAD(q,l) == NULL)
+#define INSERT(q,l,i,n) \
+do { \
+	PREV(n,l) = PREV(i,l); \
+	NEXT(n,l) = (i); \
+	if (PREV(i,l)) { \
+		NEXT(PREV(i,l),l) = (n); \
+	} else { \
+		HEAD(q,l) = (n); \
+	} \
+	PREV(i,l) = (n); \
+} while (0)
+
+#define APPENDLIST(q1,l,q2) \
+do { \
+ if (TAIL(q1,l)) { \
+ NEXT(TAIL(q1,l),l) = HEAD(q2,l); \
+ PREV(HEAD(q2,l),l) = TAIL(q1,l); \
+ } else { \
+ HEAD(q1,l) = HEAD(q2,l); \
+ } \
+ TAIL(q1,l) = TAIL(q2,l); \
+ LISTINIT(q2,l); \
+} while(0)
+
+#define INSERTLIST(q1,l,i,q2) \
+do { \
+ if (PREV(i,l)) { \
+ NEXT(PREV(i,l),l) = HEAD(q2,l); \
+ } else { \
+ HEAD(q1,l) = HEAD(q2,l); \
+ } \
+ PREV(HEAD(q2,l),l) = PREV(i,l); \
+ NEXT(TAIL(q2,l),l) = (i); \
+ PREV(i,l) = TAIL(q2,l); \
+ LISTINIT(q2,l); \
+} while(0)
+
#define FOREACH(q,l,n) for (n = HEAD(q,l); n; n = NEXT(n,l))
#define FOREACHR(q,l,n) for (n = TAIL(q,l); n; n = PREV(n,l))
#define FOREACHD(q,l,n,r) for (n = TAIL(q,l); n && (r = PREV(n,l), 1); n = r)
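/*
 * A usage sketch (illustrative, not part of the patch): the list macros are
 * intrusive, i.e. the node 'n' embeds the link member named by 'l' and the
 * container 'q' embeds head/tail pointers under the same member name.  The
 * exact field layout below is an assumption; HEAD/TAIL are defined outside
 * this hunk.
 */
struct item {
	int payload;
	struct { struct item *next, *prev; } link;	/* the 'l' argument */
};
struct itemq {
	struct { struct item *head, *tail; } link;	/* used by HEAD/TAIL */
};

static int sum_items(struct itemq *q)
{
	struct item *it;
	int sum = 0;

	FOREACH(q, link, it)		/* forward walk over all nodes */
		sum += it->payload;
	return sum;
}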
@@ -173,6 +209,7 @@
/*
* allocate backing store and its control structure from heap
+ *
* can be freed when usecount drops to zero
*/
static
@@ -278,6 +315,27 @@
}
static
+al_rc_t split_chunk(al_t *al, al_chunk_t *orig, size_t off, al_chunk_t **alcp)
+{
+ al_rc_t rc;
+ al_chunk_t *alc;
+ size_t len = AL_CHUNK_LEN(orig);
+
+	if (off > len)
+ return AL_ERR_ARG;
+
+ rc = new_chunk(al, orig->buf, &alc);
+ if (rc != AL_OK) return rc;
+
+ alc->begin = orig->begin;
+ alc->end = orig->begin+off;
+ orig->begin = alc->end;
+
+ *alcp = alc;
+ return AL_OK;
+}
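/*
 * Usage sketch (assumption, mirroring the splice code further below): split
 * chunk 'cur' at byte offset 'skip' so that a new chunk keeps the first
 * 'skip' bytes and 'cur' keeps the rest, then link the new chunk in front
 * of 'cur'.  Both chunks continue to reference the same backing buffer.
 */
static al_rc_t split_in_place(al_t *al, al_chunk_t *cur, size_t skip)
{
	al_chunk_t *front;
	al_rc_t rc;

	rc = split_chunk(al, cur, skip, &front);
	if (rc != AL_OK)
		return rc;
	INSERT(al, chunks, cur, front);	/* front now precedes cur */
	return AL_OK;
}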
+
+static
void dispose_chunk(al_t *al, al_chunk_t *alc)
{
--alc->buf->usecount;
@@ -361,7 +419,7 @@
/******************************************************************/
/*
- * allocate an empty assembly line
+ * allocate an empty assembly list
* dispose all chunks and all allocated backing store
*/
al_rc_t al_create(al_t **alp)
@@ -372,7 +430,7 @@
if (alp == NULL)
return AL_RC(AL_ERR_ARG);
- /* allocate and initialize new assembly line object */
+ /* allocate and initialize new assembly list object */
/* XXX - what malloc ? */
if ((al = (al_t *)malloc(sizeof(al_t))) == NULL)
return AL_RC(AL_ERR_MEM);
@@ -414,6 +472,9 @@
return AL_OK;
}
+/*
+ * copy bytes into buffer, FIFO
+ */
al_rc_t al_append_bytes(al_t *al, const char *src, size_t n)
{
al_rc_t rc;
@@ -450,6 +511,9 @@
return AL_OK;
}
+/*
+ * copy bytes into buffer, LIFO
+ */
al_rc_t al_prepend_bytes(al_t *al, const char *src, size_t n)
{
al_rc_t rc;
@@ -488,6 +552,14 @@
return AL_OK;
}
+/*
+ * append a caller-supplied buffer
+ *
+ * the buffer must exist until the list is destroyed
+ * XXX - buffer can have multiple refs caused by splice operations
+ * XXX - some list operations modify the buffer
+ */
al_rc_t al_attach_buffer(al_t *al, char *p, size_t n)
{
al_rc_t rc;
@@ -506,72 +578,167 @@
return AL_OK;
}
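/*
 * Usage sketch (assumption): build a list from copied and attached data.
 * al_append_bytes/al_prepend_bytes copy their input, al_attach_buffer only
 * references it, so 'banner' must stay alive until the list is destroyed.
 */
static char banner[] = "HELLO\r\n";

static al_rc_t build_greeting(al_t *al)
{
	al_rc_t rc;

	rc = al_append_bytes(al, "world\r\n", 7);	/* FIFO: added at the tail */
	if (rc != AL_OK) return rc;
	rc = al_prepend_bytes(al, "dear ", 5);		/* LIFO: added at the head */
	if (rc != AL_OK) return rc;
	return al_attach_buffer(al, banner, sizeof(banner) - 1);
}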
-al_rc_t al_splice(al_t *al, size_t off, size_t n, al_t *tal)
+/*
+ * generic splice operation
+ *
+ * remove n bytes starting at off; the removed bytes are appended to tal
+ * (or disposed of when tal is NULL), then the chunks of nal (if any)
+ * are inserted at off without copying
+ */
+al_rc_t al_splice(al_t *al, size_t off, size_t n, al_t *nal, al_t *tal)
{
al_rc_t rc;
al_chunk_t *cur, *next;
- size_t skip, len, step;
+ size_t pos, skip, len, step;
+	/* AL_BYTES_ALL is shorthand for "everything from off to the end" */
if (n == AL_BYTES_ALL) n = al->bytes - off;
- rc = al_seek(al, off, &cur, &skip);
+ pos = off;
+
+	/*
+	 * seek to the start of the span; fail with EOF when the seek
+	 * position does not exist (end of data must remain a valid seek
+	 * position so that data can still be appended there)
+	 */
+ rc = al_seek(al, pos, &cur, &skip);
if (rc != AL_OK) return rc;
-printf("initial seek %d -> %p + %d\n",off,cur,skip);
- while (n > 0) {
- /* check if a partial chunk was selected */
+
+ /* as long as there is data to move */
+ while (n > 0 && cur != NULL) {
next = NEXT(cur, chunks);
len = AL_CHUNK_LEN(cur);
+
+ /*
+ * check if partial chunk was selected
+ * if skip > 0 we have to preserve bytes at the beginning
+ * if skip == 0 && n < len-skip we have to preserve bytes at the end
+ */
if (skip > 0 || n < len - skip) {
- /* copy span from src chunk to target */
- step = len - skip;
- if (n < step)
- step = n;
-printf("partial copy (%p + %d) + %d bytes\n",cur,skip,step);
+
+ /* compute number of bytes to process */
+ step = AL_CHUNK_SPAN(cur, skip, n);
+
if (tal != NULL)
al_append_bytes(tal, AL_CHUNK_PTR(cur, skip), step);
- /* cut span from src chunk */
+
+ /*
+ * cut span from src chunk
+ * if skip == 0, simply shrink the chunk from the beginning
+ * if skip > 0, compute number of bytes to preserve,
+ * align buffer and shrink chunk from the end
+ */
if (skip == 0) {
-printf("shrink at begin\n");
AL_PRESIZE(al, cur, -step);
} else {
/* align trailing bytes */
if (len > (skip+step)) {
-printf("align chunk buffer (%d + %d) <- %d\n",skip,step,len-(skip+step));
memmove(
AL_CHUNK_PTR(cur, skip),
AL_CHUNK_PTR(cur, skip+step),
len - (skip+step)
);
}
-printf("shrink at end\n");
AL_RESIZE(al, cur, -step);
}
+
} else {
+
+			/*
+			 * the span covers the whole chunk: move it to the
+			 * target chain (or dispose of it) and do the byte
+			 * accounting manually
+			 */
step = len;
-printf("hand over chunk %d\n",step);
REMOVE(al, chunks, cur);
al->bytes -= step;
if (tal != NULL) {
ADDTAIL(tal, chunks, cur);
tal->bytes += step;
} else {
- dispose_chunk(al,cur);
+ dispose_chunk(al, cur);
}
+
}
n -= step;
- off += step;
+ pos += step;
cur = next;
skip = 0;
+
+ if (cur == NULL)
+ break;
}
- return AL_ERR_INT;
+ /*
+ * now splice in replacement chunks
+ */
+ if (nal && !ISEMPTY(nal,chunks)) {
+ al_chunk_t *ins;
+
+ /* rewind */
+ rc = al_seek(al, off, &ins, &skip);
+ if (rc != AL_OK) return rc;
+
+ if (ins == NULL) {
+
+			/*
+			 * insert position is the end of the list:
+			 * simply append the replacement chunks
+			 */
+ APPENDLIST(al,chunks,nal);
+
+ } else {
+
+			/*
+			 * the insert position falls inside a chunk (skip > 0):
+			 * split that chunk in two so the replacement chunks
+			 * can be linked in without copying
+			 */
+ if (skip > 0) {
+ al_chunk_t *twin;
+ rc = split_chunk(al, ins, skip, &twin);
+ if (rc != AL_OK) return rc;
+
+ INSERT(al,chunks,ins,twin);
+ }
+ INSERTLIST(al,chunks,ins,nal);
+
+ }
+
+ al->bytes += nal->bytes;
+ nal->bytes = 0;
+ }
+
+ return AL_OK;
+}
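/*
 * Usage sketch (assumption): with the extended signature one call covers
 * delete, cut, insert and replace; 'nal' supplies replacement chunks,
 * 'tal' receives the removed bytes, and either may be NULL.  'scratch' is
 * a hypothetical caller-created list used to stage the replacement data.
 */
static al_rc_t replace_bytes(al_t *al, size_t off, size_t n,
                             const char *src, size_t len, al_t *scratch)
{
	al_rc_t rc;

	rc = al_append_bytes(scratch, src, len);	/* stage the new data */
	if (rc != AL_OK) return rc;
	/* old bytes are disposed of; scratch ends up empty */
	return al_splice(al, off, n, scratch, NULL);
}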
+
+/*
+ * list traversal requires a context; it is allocated from the heap so
+ * that its inner structure can stay private to this module
+ */
+al_rc_t al_txalloc(al_t *al, al_tx_t **txpp)
+{
+ al_tx_t *txp;
+
+ txp = (al_tx_t*)(al->m.malloc)(sizeof(al_tx_t));
+ if (txp == NULL)
+ return AL_ERR_MEM;
+
+ *txpp = txp;
+ return AL_OK;
}
-al_rc_t al_truncate(al_t *al, size_t off, size_t n)
+/*
+ * free traversal context using proper policy function
+ */
+al_rc_t al_txfree(al_t *al, al_tx_t *txp)
{
- return al_splice(al, off, n, NULL);
+ (al->m.free)(txp);
+ return AL_OK;
}
+/*
+ * initiate list traversal
+ *
+ * - do initial seek, fail if position does not exist
+ * - save traversal parameters
+ */
al_rc_t al_traverse(al_t *al, size_t off, size_t n, al_td_t dir, al_tx_t *txp)
{
al_rc_t rc;
@@ -581,7 +748,7 @@
txp->cur = NULL;
rc = al_seek(al, off, &txp->cur, &txp->skip);
- if (rc != AL_OK) return AL_OK;
+ if (rc != AL_OK) return rc;
txp->dir = dir;
txp->togo = n;
@@ -589,6 +756,13 @@
return AL_OK;
}
+/*
+ * list traversal step
+ *
+ * return EOF if at end of list
+ * clip view chunk to traversal bounds
+ * advance chunk cursor according to traversal direction
+ */
al_rc_t al_traverse_next(al_t *al, al_tx_t *txp, al_chunk_t **alcp)
{
size_t step;
@@ -626,11 +800,19 @@
return AL_OK;
}
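/*
 * Usage sketch (assumption): the canonical read loop built from the
 * traversal primitives above, with the context heap allocated via
 * al_txalloc so its layout stays private.
 */
static size_t count_span(al_t *al, size_t off, size_t n)
{
	al_tx_t *tx;
	al_chunk_t *view;
	size_t total = 0;

	if (al_txalloc(al, &tx) != AL_OK)
		return 0;
	if (al_traverse(al, off, n, AL_FORWARD, tx) == AL_OK)
		while (al_traverse_next(al, tx, &view) == AL_OK)
			total += al_chunk_len(view);	/* clipped view length */
	al_txfree(al, tx);
	return total;
}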
+/*
+ * full list traversal with callback
+ *
+ * traversal is aborted when the callback returns != AL_OK
+ * reaching EOF (and also aborting with AL_ERR_EOF) is not an error
+ *
+ * traversal context is kept on stack (XXX ?)
+ */
al_rc_t al_traverse_cb(al_t *al, size_t off, size_t n, al_td_t dir,
al_rc_t (*cb)(al_chunk_t *, void *), void *u)
{
al_rc_t rc;
- al_tx_t tx;
+ al_tx_t tx; /* XXX - private tx structure on stack */
al_chunk_t *view;
rc = al_traverse(al, off, n, dir, &tx);
@@ -648,21 +830,31 @@
return AL_OK;
}
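/*
 * Usage sketch (assumption): a callback computing an XOR checksum over the
 * whole list; any return value other than AL_OK from the callback would
 * abort the traversal early.
 */
static al_rc_t xor_cb(al_chunk_t *view, void *u)
{
	unsigned char *sum = u;
	size_t i, len = al_chunk_len(view);

	for (i = 0; i < len; i++)
		*sum ^= (unsigned char)*al_chunk_ptr(view, i);
	return AL_OK;
}

static unsigned char checksum(al_t *al)
{
	unsigned char sum = 0;

	al_traverse_cb(al, 0, al_bytes(al), AL_FORWARD, xor_cb, &sum);
	return sum;
}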
-al_rc_t al_flatten(al_t *al, size_t off, size_t n, char *dst)
+/*
+ * full list traversal with data copy to linear buffer
+ *
+ * the actual number of bytes copied is reported via *lenp
+ *
+ * traversal context is kept on stack (XXX ?)
+ */
+al_rc_t al_flatten(al_t *al, size_t off, size_t n, char *dst, size_t *lenp)
{
al_rc_t rc;
- al_tx_t tx;
+ al_tx_t tx; /* XXX - private tx structure on stack */
al_chunk_t *view;
- size_t step;
+ size_t step, total;
rc = al_traverse(al, off, n, AL_FORWARD, &tx);
if (rc != AL_OK) return rc;
+ total = 0;
while ((rc = al_traverse_next(al, &tx, &view)) == AL_OK) {
step = AL_CHUNK_LEN(view);
memmove(dst, AL_CHUNK_PTR(view,0), step);
- dst += step;
+ dst += step;
+ total += step;
}
+ *lenp = total;
if (rc != AL_ERR_EOF)
return rc;
@@ -670,21 +862,28 @@
return AL_OK;
}
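/*
 * Usage sketch (assumption): flatten the whole list into a freshly
 * malloc'ed, NUL-terminated buffer; the new 'lenp' argument reports how
 * many bytes were actually copied.
 */
static char *flatten_to_string(al_t *al, size_t *lenp)
{
	size_t want = al_bytes(al);
	char *buf;

	if ((buf = (char *)malloc(want + 1)) == NULL)
		return NULL;
	if (al_flatten(al, 0, want, buf, lenp) != AL_OK) {
		free(buf);
		return NULL;
	}
	buf[*lenp] = '\0';
	return buf;
}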
+/*
+ * function wrappers relaying the macros above to the caller
+ *
+ * al_bytes - total number of bytes in the list
+ * al_chunk_len - number of bytes in a chunk
+ * al_chunk_span - clip interval [off,off+n) against the chunk length
+ * al_chunk_ptr - return memory pointer to a byte within a chunk
+ *
+ * off must be a valid offset within [0,len)
+ */
size_t al_bytes(const al_t *al)
{
return al->bytes;
}
-
size_t al_chunk_len(al_chunk_t *alc)
{
return AL_CHUNK_LEN(alc);
}
-
size_t al_chunk_span(al_chunk_t *alc, size_t off, size_t n)
{
return AL_CHUNK_SPAN(alc, off, n);
}
-
char *al_chunk_ptr(al_chunk_t *alc, size_t off)
{
return AL_CHUNK_PTR(alc, off);