Can you create a np.complex128 1d array from two np.float 1d arrays without a copy?
Setup:
I have two arrays, reals and imags, that come from shared memory:
#!/usr/bin/env python2
import multiprocessing
reals = multiprocessing.RawArray('d', 10000000)
imags = multiprocessing.RawArray('d', 10000000)
Then I turn them into numpy arrays, named reals2 and imags2, without any copies:
import numpy as np
reals2 = np.frombuffer(reals)
imags2 = np.frombuffer(imags)
# check if the objects did a copy
assert reals2.flags['OWNDATA'] is False
assert imags2.flags['OWNDATA'] is False
Then I want to make a np.complex128 1d array data, again without copying the data, but I don't know how to do it.
Question:
Can you create a np.complex128 1d array data from a pair of float arrays without copying, yes/no?
If yes, how?
Short answer: no. But if you control the sender, then there is a solution that requires no copies.
Longer answer:
- Based on my research, I don't think there is a way to create a numpy complex array from two separate arrays without copying the data (see the sketch after this list for why the obvious approaches copy).
- IMO you can't do it, because all of numpy's compiled C code assumes interleaved real/imag data.
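For illustration, here is a minimal sketch of the obvious approaches (the small placeholder arrays stand in for the reals2 and imags2 views from the question; this sketch is not part of the original answer). Both end up allocating a brand-new interleaved buffer, i.e. a copy:
import numpy as np
# hypothetical stand-ins for the reals2/imags2 views from the question
reals2 = np.zeros(4)
imags2 = np.ones(4)
# the straightforward combination allocates a new interleaved buffer ...
combined = reals2 + 1.0j * imags2
assert combined.flags['OWNDATA'] is True  # ... so it owns (copied) data
# assigning into a preallocated complex array also copies, element by element
combined2 = np.empty(reals2.size, dtype=np.complex128)
combined2.real = reals2
combined2.imag = imags2
assert combined2.flags['OWNDATA'] is True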
If you control the sender, you don't need any copy operations to get your data. Here's how!
#!/usr/bin/env python2
import multiprocessing
import numpy as np
# parent process creates some data that needs to be shared with the child processes
data = np.random.randn(10) + 1.0j * np.random.randn(10)
assert data.dtype == np.complex128
# copy the data from the parent process to shared memory
shared_data = multiprocessing.RawArray('d', 2 * data.size)
shared_data[0::2] = data.real
shared_data[1::2] = data.imag
# simulate the child process getting only the shared_data
data2 = np.frombuffer(shared_data)
assert data2.flags['OWNDATA'] is False
assert data2.dtype == np.float64
assert data2.size == 2 * data.size
# convert reals to complex
data3 = data2.view(np.complex128)
assert data3.flags['OWNDATA'] is False
assert data3.dtype == np.complex128
assert data3.size == data.size
assert np.all(data3 == data)
# done - if no AssertionError then success
print 'success'
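As a side note (a hedged variation on the snippet above, not part of the original answer): np.frombuffer can also reinterpret the shared buffer as complex128 in a single step, still without copying:
import multiprocessing
import numpy as np
data = np.random.randn(10) + 1.0j * np.random.randn(10)
shared_data = multiprocessing.RawArray('d', 2 * data.size)
shared_data[0::2] = data.real
shared_data[1::2] = data.imag
# view the same shared buffer directly as complex128 - no intermediate float view
data4 = np.frombuffer(shared_data, dtype=np.complex128)
assert data4.flags['OWNDATA'] is False
assert np.all(data4 == data)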
Tip: use the snippet above as a good starting point.
Here is the same processing, but this time launching multiple processes, getting the data back from each one, and verifying the returned data:
#!/usr/bin/env python2
import multiprocessing
import os
# third-party
import numpy as np

# constants
# =========
N_POINTS = 3
N_THREADS = 4

# functions
# =========
def func(index, shared_data, results_dict):
    # simulate the child process getting only the shared_data
    data2 = np.frombuffer(shared_data)
    assert data2.flags['OWNDATA'] is False
    assert data2.dtype == np.float64
    # convert reals to complex
    data3 = data2.view(np.complex128)
    assert data3.flags['OWNDATA'] is False
    assert data3.dtype == np.complex128
    print '[child.pid=%s,type=%s]: %s' % (os.getpid(), type(shared_data), data3)
    # return the results in a SLOW but relatively easy way
    results_dict[os.getpid()] = np.copy(data3) * index

# the script
# ==========
if __name__ == '__main__':
    # parent process creates some data that needs to be shared with the child processes
    data = np.random.randn(N_POINTS) + 1.0j * np.random.randn(N_POINTS)
    assert data.dtype == np.complex128
    # copy the data from the parent process to shared memory
    shared_data = multiprocessing.RawArray('d', 2 * data.size)
    shared_data[0::2] = data.real
    shared_data[1::2] = data.imag
    print '[parent]: ', type(shared_data), data
    # do multiprocessing
    manager = multiprocessing.Manager()
    results_dict = manager.dict()
    processes = []
    for index in xrange(N_THREADS):
        process = multiprocessing.Process(target=func, args=(index, shared_data, results_dict))
        processes.append(process)
    for process in processes:
        process.start()
    for process in processes:
        process.join()
    # get the results back from the processes
    results = [results_dict[process.pid] for process in processes]
    # verify the values from the processes
    for index in xrange(N_THREADS):
        result = results[index]
        assert np.all(result == data * index)
    del processes
    # done
    print 'success'
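The comment in the script above flags the Manager-dict return path as SLOW. Here is a minimal sketch of a faster alternative, assuming each child gets its own pre-allocated RawArray to write into (func_fast and result_buffers are hypothetical names, not from the original):
#!/usr/bin/env python2
import multiprocessing
import numpy as np

N_POINTS = 3
N_THREADS = 4

def func_fast(index, shared_data, result_buffer):
    # zero-copy views over both the shared input and this child's output buffer
    data3 = np.frombuffer(shared_data).view(np.complex128)
    out = np.frombuffer(result_buffer).view(np.complex128)
    out[:] = data3 * index  # write straight into shared memory, no pickling

if __name__ == '__main__':
    data = np.random.randn(N_POINTS) + 1.0j * np.random.randn(N_POINTS)
    shared_data = multiprocessing.RawArray('d', 2 * data.size)
    shared_data[0::2] = data.real
    shared_data[1::2] = data.imag
    # one pre-allocated output buffer per child
    result_buffers = [multiprocessing.RawArray('d', 2 * N_POINTS)
                      for _ in xrange(N_THREADS)]
    processes = [multiprocessing.Process(target=func_fast,
                                         args=(i, shared_data, result_buffers[i]))
                 for i in xrange(N_THREADS)]
    for p in processes:
        p.start()
    for p in processes:
        p.join()
    # the parent reads each child's result directly out of shared memory
    for i in xrange(N_THREADS):
        result = np.frombuffer(result_buffers[i]).view(np.complex128)
        assert np.all(result == data * i)
    print 'success'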