ImageSource.FromStream 不适用于 Xamarin.Forms iOS
ImageSource.FromStream not working on Xamarin.Forms iOS
我正在从原始像素数据创建位图,然后使用 ImageSource.FromStream 加载图像。它适用于 Android 和 UWP。我使用的是以下类。
手动创建位图
/// <summary>
/// Builds a complete in-memory BMP file (14-byte file header + 40-byte
/// BITMAPINFOHEADER + raw 32 bpp pixel data) from a native pixel buffer,
/// suitable for feeding to ImageSource.FromStream.
/// NOTE(review): iOS's image decoder is stricter than Android/UWP about BMP
/// variants — 32 bpp with biCompression = BI_RGB is rejected by some strict
/// parsers (which expect BI_BITFIELDS for 32 bpp). That is the likely reason
/// this stream fails only on iOS; converting the raw pixels to PNG via a
/// platform service (as the answer suggests) is the robust workaround.
/// </summary>
public class BitmapBuilder
{
    private const int BitmapTotalHeaderSize = 54; // 14-byte file header + 40-byte BITMAPINFOHEADER
    private const int BitmapHeaderSize = 40;      // BITMAPINFOHEADER size
    private const int BitCount = 32;              // bits per pixel
    private const int Planes = 1;                 // must be 1 per the BMP spec
    private const int DPI = 96;
    // BUG FIX: biXPelsPerMeter/biYPelsPerMeter are pixels per METRE, not DPI.
    // 96 DPI * 39.3701 inches/metre ≈ 3780 px/m (the original wrote 96 directly).
    private const int PixelsPerMeter = 3780;
    private const int BytesPerPixel = 4;          // 32 bpp => 4 bytes per pixel (was misnamed ColorsPerChannel)

    // Image-independent header template; file size, width and height are
    // patched per image in GetBitmapFromRawData.
    private static byte[] s_BitmapHeaderByte;

    static BitmapBuilder()
    {
        CreateBitmapHeaderByte();
    }

    /// <summary>Fills the shared 54-byte header template with all image-independent fields.</summary>
    private static void CreateBitmapHeaderByte()
    {
        s_BitmapHeaderByte = new byte[BitmapTotalHeaderSize];
        s_BitmapHeaderByte[0] = (byte)'B';
        s_BitmapHeaderByte[1] = (byte)'M';
        // Offset 2 (file size) is image-dependent and patched per image.
        Buffer.BlockCopy(BitConverter.GetBytes(BitmapTotalHeaderSize), 0, s_BitmapHeaderByte, 6, 4);  // pixel-data offset
        // Offset 10 (reserved) stays zero — a fresh byte[] is already zero-filled.
        Buffer.BlockCopy(BitConverter.GetBytes(BitmapHeaderSize), 0, s_BitmapHeaderByte, 14, 4);      // info-header size
        // Offsets 18/22 (width/height) are patched per image.
        Buffer.BlockCopy(BitConverter.GetBytes((short)Planes), 0, s_BitmapHeaderByte, 26, 2);         // planes
        Buffer.BlockCopy(BitConverter.GetBytes((short)BitCount), 0, s_BitmapHeaderByte, 28, 2);       // bits per pixel
        // Offsets 30 (compression = BI_RGB), 34 (image size, 0 is allowed for
        // BI_RGB), 46 (colors used) and 50 (colors important) stay zero.
        Buffer.BlockCopy(BitConverter.GetBytes(PixelsPerMeter), 0, s_BitmapHeaderByte, 38, 4);        // horizontal resolution
        Buffer.BlockCopy(BitConverter.GetBytes(PixelsPerMeter), 0, s_BitmapHeaderByte, 42, 4);        // vertical resolution
    }

    /// <summary>
    /// Copies width x height 32 bpp pixels from native memory into a new byte
    /// array laid out as a complete BMP file. A positive height means the rows
    /// are interpreted bottom-up, per the BMP spec.
    /// </summary>
    /// <param name="pixelData">Pointer to at least height*width*4 bytes of pixel data.</param>
    /// <param name="width">Image width in pixels; must be positive.</param>
    /// <param name="height">Image height in pixels; must be positive.</param>
    /// <returns>A byte array containing the full BMP file.</returns>
    /// <exception cref="ArgumentOutOfRangeException">width or height is not positive.</exception>
    /// <exception cref="OverflowException">the total size exceeds Int32.MaxValue.</exception>
    public static byte[] GetBitmapFromRawData(IntPtr pixelData, int width, int height)
    {
        if (width <= 0)
            throw new ArgumentOutOfRangeException(nameof(width));
        if (height <= 0)
            throw new ArgumentOutOfRangeException(nameof(height));

        // checked: surface overflow instead of silently allocating a wrong-sized buffer.
        byte[] buffer = new byte[checked(height * width * BytesPerPixel + BitmapTotalHeaderSize)];
        Buffer.BlockCopy(s_BitmapHeaderByte, 0, buffer, 0, s_BitmapHeaderByte.Length);
        Buffer.BlockCopy(BitConverter.GetBytes(buffer.Length), 0, buffer, 2, 4); // file size
        Buffer.BlockCopy(BitConverter.GetBytes(width), 0, buffer, 18, 4);        // width
        Buffer.BlockCopy(BitConverter.GetBytes(height), 0, buffer, 22, 4);       // height
        Marshal.Copy(pixelData, buffer, BitmapTotalHeaderSize, buffer.Length - BitmapTotalHeaderSize); // pixel data
        return buffer;
    }
}
然后我将流设置为图像控件
// Build a BMP byte array from the native pixel buffer, then hand Forms a fresh
// MemoryStream on every invocation — ImageSource.FromStream may call the
// factory more than once, so it must not return an already-consumed stream.
byte[] buffer = BitmapBuilder.GetBitmapFromRawData(pixelData, (int)width, (int)height);
img.Source = ImageSource.FromStream(() => new MemoryStream(buffer));
它不适用于 iOS。
然后我保存了手动创建的位图并将其用作文件。它适用于 Android 和 UWP,但不适用于 iOS。
// Load the previously saved BMP from the app's documents directory.
// NOTE(review): this works on Android/UWP but not iOS — presumably iOS's
// decoder rejects this BMP variant (32 bpp + BI_RGB) rather than FromStream
// itself failing; confirm by opening the saved file in an iOS image viewer.
string sFile = Path.Combine(documentDirectory, "testImage.bmp");
if (File.Exists(sFile))
{
img.Source = ImageSource.FromStream(() => File.OpenRead(sFile));
}
`
我很困惑,为什么 ImageSource.FromStream 似乎没有在 iOS 上工作。
我们决定使用 iOS 的依赖服务,并使用 CGImage 将原始像素数据转换为 PNG(而不是位图)。然后我们返回来自该 PNG 的流。
我正在从原始像素数据创建位图,然后使用 ImageSource.FromStream 加载图像。它适用于 Android 和 UWP。我使用的是以下类。
手动创建位图:

public class BitmapBuilder
{
private const int BitmapTotalHeaderSize = 54;
private const int BitmapHeaderSize = 40;
private const int BitCount = 32;
private const int Planes = 1;
private const int DPI = 96;
private const int ColorsPerChannel = 4;
private static byte[] s_BitmapHeaderByte;
// Type initializer: builds the shared 54-byte BMP header template exactly once
// per process, before any call to GetBitmapFromRawData.
static BitmapBuilder()
{
CreateBitmapHeaderByte();
}
/// <summary>
/// Fills s_BitmapHeaderByte with the image-independent fields of a BMP file
/// (14-byte file header + 40-byte BITMAPINFOHEADER). The file size, width and
/// height fields are left zero and patched per image in GetBitmapFromRawData.
/// </summary>
private static void CreateBitmapHeaderByte()
{
    // BUG FIX: biXPelsPerMeter/biYPelsPerMeter are pixels per METRE, not DPI.
    // 96 DPI * 39.3701 inches/metre ≈ 3780 px/m (the original wrote 96 directly).
    int pixelsPerMeter = (int)Math.Round(DPI * 39.3701);

    s_BitmapHeaderByte = new byte[BitmapTotalHeaderSize];
    s_BitmapHeaderByte[0] = (byte)'B';
    s_BitmapHeaderByte[1] = (byte)'M';
    // Offset 2 (file size) is image-dependent and patched later.
    Buffer.BlockCopy(BitConverter.GetBytes(BitmapTotalHeaderSize), 0, s_BitmapHeaderByte, 6, 4);  // pixel-data offset
    Buffer.BlockCopy(BitConverter.GetBytes((int)0), 0, s_BitmapHeaderByte, 10, 4);                // reserved
    Buffer.BlockCopy(BitConverter.GetBytes(BitmapHeaderSize), 0, s_BitmapHeaderByte, 14, 4);      // info-header size
    // Offsets 18/22 (width/height) are patched later.
    Buffer.BlockCopy(BitConverter.GetBytes(Planes), 0, s_BitmapHeaderByte, 26, 2);                // planes (always 1)
    Buffer.BlockCopy(BitConverter.GetBytes(BitCount), 0, s_BitmapHeaderByte, 28, 2);              // bits per pixel
    Buffer.BlockCopy(BitConverter.GetBytes((int)0), 0, s_BitmapHeaderByte, 30, 4);                // compression = BI_RGB
    Buffer.BlockCopy(BitConverter.GetBytes((int)0), 0, s_BitmapHeaderByte, 34, 4);                // image size (0 allowed for BI_RGB)
    Buffer.BlockCopy(BitConverter.GetBytes(pixelsPerMeter), 0, s_BitmapHeaderByte, 38, 4);        // horizontal resolution
    Buffer.BlockCopy(BitConverter.GetBytes(pixelsPerMeter), 0, s_BitmapHeaderByte, 42, 4);        // vertical resolution
    Buffer.BlockCopy(BitConverter.GetBytes((int)0), 0, s_BitmapHeaderByte, 46, 4);                // colors used
    Buffer.BlockCopy(BitConverter.GetBytes((int)0), 0, s_BitmapHeaderByte, 50, 4);                // colors important
}
/// <summary>
/// Builds a complete in-memory BMP file from raw 32 bpp pixel data: the shared
/// header template, followed by height*width*4 bytes copied from native
/// memory, with the file size, width and height patched into the header.
/// </summary>
public static byte[] GetBitmapFromRawData(IntPtr pixelData, int width, int height)
{
    // Little-endian 32-bit write into the header, matching the BMP layout.
    void WriteInt32(byte[] dest, int offset, int value) =>
        Buffer.BlockCopy(BitConverter.GetBytes(value), 0, dest, offset, 4);

    int pixelByteCount = height * width * ColorsPerChannel;
    byte[] bmp = new byte[pixelByteCount + BitmapTotalHeaderSize];

    // Start from the prebuilt image-independent header template.
    Array.Copy(s_BitmapHeaderByte, bmp, s_BitmapHeaderByte.Length);

    // Patch the per-image header fields.
    WriteInt32(bmp, 2, bmp.Length); // file size
    WriteInt32(bmp, 18, width);     // width
    WriteInt32(bmp, 22, height);    // height

    // Append the raw pixels straight after the header.
    Marshal.Copy(pixelData, bmp, BitmapTotalHeaderSize, pixelByteCount);
    return bmp;
}
然后我将流设置为图像控件
// Build a BMP byte array from the native pixel buffer, then hand Forms a fresh
// MemoryStream on every invocation — ImageSource.FromStream may call the
// factory more than once, so it must not return an already-consumed stream.
byte[] buffer = BitmapBuilder.GetBitmapFromRawData(pixelData, (int)width, (int)height);
img.Source = ImageSource.FromStream(() => new MemoryStream(buffer));
它不适用于 iOS。
然后我保存了手动创建的位图并将其用作文件。它适用于 Android 和 UWP,但不适用于 iOS。
// Load the previously saved BMP from the app's documents directory.
// NOTE(review): this works on Android/UWP but not iOS — presumably iOS's
// decoder rejects this BMP variant (32 bpp + BI_RGB) rather than FromStream
// itself failing; confirm by opening the saved file in an iOS image viewer.
string sFile = Path.Combine(documentDirectory, "testImage.bmp");
if (File.Exists(sFile))
{
img.Source = ImageSource.FromStream(() => File.OpenRead(sFile));
}
`
我很困惑,为什么 ImageSource.FromStream 似乎没有在 iOS 上工作。
我们决定使用 iOS 的依赖服务,并使用 CGImage 将原始像素数据转换为 PNG(而不是位图)。然后我们返回来自该 PNG 的流。