Initial commit
commit 0f86b0434b
38 changed files with 2370 additions and 0 deletions
147  LibDgf/Dat/DatBuilder.cs  Normal file
@@ -0,0 +1,147 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;

namespace LibDgf.Dat
{
    public class DatBuilder
    {
        public class ReplacementEntry
        {
            public int Index { get; set; }
            public DatReader SourceDat { get; set; }
            public int SourceIndex { get; set; }
            public string SourceFile { get; set; }
        }

        class NewEntry
        {
            public DatEntry ArchEntry { get; set; } = new DatEntry();
            public DatEntry OrigEntry { get; set; }
            public int OrigIndex { get; set; }
            public ReplacementEntry ReplacementEntry { get; set; }
        }

        DatReader sourceDat;

        public List<ReplacementEntry> ReplacementEntries { get; } = new List<ReplacementEntry>();

        public DatBuilder(DatReader sourceDat = null)
        {
            this.sourceDat = sourceDat;
        }

        public void Build(Stream destStream)
        {
            // Check there are no duplicated indexes
            HashSet<int> indexSet = new HashSet<int>();
            foreach (var entry in ReplacementEntries)
            {
                if (entry.Index < 0) throw new InvalidOperationException("Entry with negative index present.");
                indexSet.Add(entry.Index);
            }
            if (indexSet.Count != ReplacementEntries.Count)
            {
                throw new InvalidOperationException("Replacement entries with non-unique IDs present.");
            }

            ReplacementEntries.Sort((x, y) => x.Index.CompareTo(y.Index));
            List<NewEntry> newEntries = new List<NewEntry>();

            // Copy over original entries
            if (sourceDat != null)
            {
                for (int i = 0; i < sourceDat.EntriesCount; ++i)
                {
                    var e = sourceDat.GetEntry(i);
                    newEntries.Add(new NewEntry { OrigEntry = e, OrigIndex = i });
                }
            }

            // Set replacement entries
            foreach (var rep in ReplacementEntries)
            {
                if (rep.Index > newEntries.Count)
                    throw new InvalidOperationException("Replacement entries result in a discontinuity.");

                var newEntry = new NewEntry { ReplacementEntry = rep };
                if (rep.Index == newEntries.Count)
                    newEntries.Add(newEntry);
                else
                    newEntries[rep.Index] = newEntry;
            }

            // Update size and position
            uint dataOffset = 0;
            for (int i = 0; i < newEntries.Count; ++i)
            {
                var newEntry = newEntries[i];
                newEntry.ArchEntry.Offset = dataOffset;
                var repEntry = newEntry.ReplacementEntry;

                if (repEntry == null)
                {
                    newEntry.ArchEntry.Length = newEntry.OrigEntry.Length;
                }
                else
                {
                    if (repEntry.SourceDat != null)
                    {
                        if (repEntry.SourceFile != null)
                            throw new InvalidOperationException("Replacement entries with both DAT and file source specified exist.");
                        newEntry.ArchEntry.Length = repEntry.SourceDat.GetEntry(repEntry.SourceIndex).Length;
                    }
                    else if (repEntry.SourceFile != null)
                    {
                        newEntry.ArchEntry.Length = (uint)new FileInfo(repEntry.SourceFile).Length;
                    }
                    else
                    {
                        newEntry.ArchEntry.Length = 0;
                        newEntries.RemoveAt(i);
                        --i;
                    }
                }
                dataOffset += newEntry.ArchEntry.Length;
            }

            // Write file
            BinaryWriter bw = new BinaryWriter(destStream);
            bw.Write("DAT\0".ToCharArray());
            bw.Write(newEntries.Count);
            // Data starts after the 8-byte header plus 8 bytes per entry, rounded up to a 16-byte boundary
            dataOffset = (uint)(((newEntries.Count + 1) * 8 + 15) & ~15);
            foreach (var newEntry in newEntries)
            {
                newEntry.ArchEntry.Offset += dataOffset;
                newEntry.ArchEntry.Write(bw);
            }
            // Do we need to 16-byte align anything after the header?
            if (destStream.Position < dataOffset)
                bw.Write(new byte[dataOffset - destStream.Position]);

            foreach (var newEntry in newEntries)
            {
                var repEntry = newEntry.ReplacementEntry;
                if (repEntry == null)
                {
                    bw.Write(sourceDat.GetData(newEntry.OrigIndex));
                }
                else
                {
                    if (repEntry.SourceDat != null)
                    {
                        bw.Write(repEntry.SourceDat.GetData(repEntry.SourceIndex));
                    }
                    else
                    {
                        using (FileStream fs = File.OpenRead(repEntry.SourceFile))
                        {
                            fs.CopyTo(destStream);
                        }
                    }
                }
            }
        }
    }
}
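A minimal usage sketch for the builder, assuming hypothetical file names (game.dat, new_texture.bin, and game_patched.dat are placeholders, not part of this commit): open the original archive with DatReader, queue a ReplacementEntry sourced from an external file, and write the rebuilt archive.

using LibDgf.Dat;
using System.IO;

class PatchExample
{
    static void Main()
    {
        // Hypothetical input/output paths for illustration only
        using (var source = new DatReader(File.OpenRead("game.dat")))
        using (var output = File.Create("game_patched.dat"))
        {
            var builder = new DatBuilder(source);
            // Replace entry 3 (assumed to exist) with the contents of an external file
            builder.ReplacementEntries.Add(new DatBuilder.ReplacementEntry
            {
                Index = 3,
                SourceFile = "new_texture.bin"
            });
            builder.Build(output);
        }
    }
}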
25  LibDgf/Dat/DatEntry.cs  Normal file
@@ -0,0 +1,25 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;

namespace LibDgf.Dat
{
    public class DatEntry
    {
        public uint Offset { get; set; }
        public uint Length { get; set; }

        public void Read(BinaryReader br)
        {
            Offset = br.ReadUInt32();
            Length = br.ReadUInt32();
        }

        public void Write(BinaryWriter bw)
        {
            bw.Write(Offset);
            bw.Write(Length);
        }
    }
}
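Each directory record handled by DatEntry is 8 bytes on disk: a little-endian uint32 offset followed by a uint32 length. A small sketch (sample.dat is a placeholder path) that lists the directory of an archive using DatEntry.Read directly:

using LibDgf.Dat;
using System;
using System.IO;

class DumpEntries
{
    static void Main()
    {
        // "sample.dat" is a placeholder path for illustration
        using (var br = new BinaryReader(File.OpenRead("sample.dat")))
        {
            br.ReadChars(4);                // "DAT\0" magic
            int count = br.ReadInt32();     // number of directory records
            for (int i = 0; i < count; ++i)
            {
                var entry = new DatEntry();
                entry.Read(br);             // 4-byte offset + 4-byte length
                Console.WriteLine($"#{i}: offset=0x{entry.Offset:X}, length={entry.Length}");
            }
        }
    }
}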
89  LibDgf/Dat/DatReader.cs  Normal file
@@ -0,0 +1,89 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;

namespace LibDgf.Dat
{
    public class DatReader : IDisposable
    {
        Stream stream;
        BinaryReader br;
        List<DatEntry> entries = new List<DatEntry>();
        private bool disposedValue;

        public DatReader(Stream stream)
        {
            this.stream = stream ?? throw new ArgumentNullException(nameof(stream));

            br = new BinaryReader(stream);
            if (new string(br.ReadChars(4)) != "DAT\0") throw new InvalidDataException("Not a DAT file.");
            int numEntries = br.ReadInt32();
            for (int i = 0; i < numEntries; ++i)
            {
                DatEntry entry = new DatEntry();
                entry.Read(br);
                entries.Add(entry);
            }
        }

        public int EntriesCount
        {
            get
            {
                CheckDisposed();
                return entries.Count;
            }
        }

        public byte[] GetData(int index)
        {
            CheckDisposed();
            if (index < 0) throw new ArgumentOutOfRangeException(nameof(index), "Index cannot be negative.");
            if (index >= EntriesCount) throw new ArgumentOutOfRangeException(nameof(index), "Index cannot be greater than or equal to count.");

            var entry = entries[index];
            stream.Seek(entry.Offset, SeekOrigin.Begin);
            return br.ReadBytes((int)entry.Length);
        }

        public DatEntry GetEntry(int index)
        {
            CheckDisposed();
            if (index < 0) throw new ArgumentOutOfRangeException(nameof(index), "Index cannot be negative.");
            if (index >= EntriesCount) throw new ArgumentOutOfRangeException(nameof(index), "Index cannot be greater than or equal to count.");

            var entry = entries[index];
            return new DatEntry
            {
                Offset = entry.Offset,
                Length = entry.Length
            };
        }

        void CheckDisposed()
        {
            if (disposedValue) throw new ObjectDisposedException(GetType().FullName);
        }

        protected virtual void Dispose(bool disposing)
        {
            if (!disposedValue)
            {
                if (disposing)
                {
                    stream.Dispose();
                }

                disposedValue = true;
            }
        }

        public void Dispose()
        {
            // Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method
            Dispose(disposing: true);
            GC.SuppressFinalize(this);
        }
    }
}
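A short extraction sketch built on DatReader, with placeholder paths (game.dat and the extracted output directory are not defined by this commit): it walks every entry and dumps the raw bytes to numbered .bin files.

using LibDgf.Dat;
using System.IO;

class ExtractExample
{
    static void Main()
    {
        // Placeholder paths for illustration only
        Directory.CreateDirectory("extracted");
        using (var dat = new DatReader(File.OpenRead("game.dat")))
        {
            for (int i = 0; i < dat.EntriesCount; ++i)
            {
                byte[] data = dat.GetData(i);
                File.WriteAllBytes(Path.Combine("extracted", $"{i:D4}.bin"), data);
            }
        }
    }
}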