Parallel noise processing
parent 9b07411ebd
commit a4d7ccc1a5
5 changed files with 43 additions and 13 deletions
@@ -2,9 +2,15 @@
 #include <iostream>
 #include <algorithm>
 #include <set>
+#include <thread>
 #include "worleynoise.h"
 #include "common/vector3d.h"
 
+void WorleyNoise::runWorleyNoiseInThread(int xOffset, int xSize, WorleyNoise *noise)
+{
+    noise->renderNoiseThread(xOffset, xSize);
+}
+
 void WorleyNoise::generateNoise()
 {
     auto start = std::chrono::high_resolution_clock::now();
@@ -36,17 +42,21 @@ void WorleyNoise::generateNoise()
     noiseMap.clear();
     noiseMap.resize(size * size * size);
 
-    for (int x = 0; x < size; x++)
-    {
-        for (int y = 0; y < size; y++)
-        {
-            for (int z = 0; z < size; z++)
-            {
-                Vector3d point = Vector3d(x, y, z);
-                point /= (float) size;
-                setNoise(x, y, z, distanceToClosestPoint(point));
-            }
-        }
-    }
+    int const nThreads = (int) std::thread::hardware_concurrency() - 1;
+    int threadSize = std::floor((float) size / (float) nThreads);
+    int remaining = size - nThreads * threadSize;
+    std::vector<std::thread> threads;
+    for (int n = 0; n < nThreads; n++)
+    {
+        threads.emplace_back(runWorleyNoiseInThread, n * threadSize, threadSize, this);
+    }
+
+    renderNoiseThread(nThreads * threadSize, remaining);
+
+    // Rejoin the threads
+    for (int t = 0; t < nThreads; ++t)
+    {
+        threads[t].join();
+    }
 
     // Normalize getNoise map to [0, 1]
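The hunk above carves the x-range of the noise volume into nThreads slabs of threadSize slices each; whatever the integer division leaves over (the remaining slices) is rendered on the calling thread. A minimal standalone sketch of the same arithmetic, using a hypothetical size of 64 (the real value comes from the Noise base class), checks that the slabs cover every x slice exactly once:

#include <cassert>
#include <cmath>
#include <iostream>
#include <thread>
#include <vector>

int main()
{
    int const size = 64; // hypothetical noise resolution for the sketch

    // Same slab arithmetic as WorleyNoise::generateNoise()
    int nThreads = (int) std::thread::hardware_concurrency() - 1;
    if (nThreads < 1)
        nThreads = 1; // guard added for the sketch: hardware_concurrency() may report 0 or 1
    int threadSize = (int) std::floor((float) size / (float) nThreads);
    int remaining = size - nThreads * threadSize;

    std::vector<int> covered(size, 0);

    // Slabs handed to the worker threads
    for (int n = 0; n < nThreads; n++)
        for (int x = n * threadSize; x < n * threadSize + threadSize; x++)
            covered[x]++;

    // Leftover slab rendered on the calling thread
    for (int x = nThreads * threadSize; x < nThreads * threadSize + remaining; x++)
        covered[x]++;

    // Every x slice is rendered exactly once
    for (int x = 0; x < size; x++)
        assert(covered[x] == 1);

    std::cout << nThreads << " worker threads, " << threadSize
              << " slices each, " << remaining << " on the main thread\n";
    return 0;
}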
@@ -66,6 +76,22 @@ void WorleyNoise::generateNoise()
               << duration.count() << " seconds" << std::endl;
 }
 
+void WorleyNoise::renderNoiseThread(int xOffset, int xSize)
+{
+    for (int x = 0; x < xSize; x++)
+    {
+        for (int y = 0; y < size; y++)
+        {
+            for (int z = 0; z < size; z++)
+            {
+                Vector3d point = Vector3d(xOffset + x, y, z);
+                point /= (float) size;
+                setNoise(xOffset + x, y, z, distanceToClosestPoint(point));
+            }
+        }
+    }
+}
+
 WorleyNoise::WorleyNoise(int size, int numberOfPoints, unsigned int seed) : numberOfPoints(numberOfPoints), Noise(size)
 {
     // Init random
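renderNoiseThread writes only the slices [xOffset, xOffset + xSize) of the pre-sized noiseMap, so the worker threads never touch the same cells and no locking is required; the static runWorleyNoiseInThread simply forwards an instance pointer to that member function so std::thread can invoke it like a free function. The following is a distilled, hypothetical sketch of that trampoline-plus-disjoint-slab pattern (SlabRenderer is not part of this project):

#include <iostream>
#include <thread>
#include <vector>

class SlabRenderer
{
public:
    explicit SlabRenderer(int size) : size(size), buffer(size, 0.0f) {}

    void run(int nThreads)
    {
        int slab = size / nThreads;
        std::vector<std::thread> threads;
        for (int n = 0; n < nThreads; n++)
            threads.emplace_back(renderInThread, n * slab, slab, this);

        // Leftover slices stay on the calling thread, as in WorleyNoise::generateNoise()
        renderSlab(nThreads * slab, size - nThreads * slab);

        for (auto &t : threads)
            t.join();
    }

    float at(int x) const { return buffer[x]; }

private:
    // Static trampoline: std::thread can call this like a free function
    static void renderInThread(int offset, int count, SlabRenderer *self)
    {
        self->renderSlab(offset, count);
    }

    // Each call writes only buffer[offset .. offset + count), so no locking is needed
    void renderSlab(int offset, int count)
    {
        for (int x = 0; x < count; x++)
            buffer[offset + x] = (float) (offset + x) / (float) size;
    }

    int size;
    std::vector<float> buffer;
};

int main()
{
    SlabRenderer r(100);
    r.run(3);
    std::cout << r.at(99) << "\n"; // prints 0.99
    return 0;
}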
@@ -18,6 +18,8 @@ public:
      */
     WorleyNoise(int size, int numberOfPoints, unsigned int seed = 0);
 
+    void renderNoiseThread(int xOffset, int xSize);
+
 protected:
     int numberOfPoints;
     std::vector<Vector3d> points; // 3D-Array, each cell represents a subcell. There are numberOfPoints^3 subcells.
@@ -32,6 +34,8 @@ protected:
     void generateNoise();
 
     std::vector<Vector3d> getSubcellPoints(Vector3d point);
 
+    static void runWorleyNoiseInThread(int xOffset, int xSize, WorleyNoise *noise);
+
 };
@@ -72,7 +72,7 @@ int main()
     // Render the scene
     SuperRenderer sr;
     sr.setSuperSamplingFactor(1);
-    sr.renderImage(scene, camera, 512, 512).save("result.png");
+    sr.renderImage(scene, camera, 256, 256).save("result.png");
 
     return 0;
 }
@@ -69,7 +69,7 @@ bool CloudShader::isTransparent() const
 }
 
 CloudShader::CloudShader(const CloudSettings &settings) : settings(settings),
-                                                          cloudNoise(CloudNoise(NOISE_SIZE, settings.seed))
+                                                          cloudNoise(CloudNoise(settings.noiseSize, settings.seed))
 {
     cloudNoise.invert = true;
 }
@@ -7,11 +7,11 @@
 #include "primitive/primitive.h"
 #include "common/noise/worleynoise.h"
 
-int const NOISE_SIZE = 64;
 float const TRANSMITTANCE_BREAK = 0.0001f; // If transmittance goes below this limit, the cloud is considered opaque
 
 struct CloudSettings
 {
+    int noiseSize = 256; // 64
     unsigned int seed = 0; // 0 for random seed
     float densitySteps = .2f; // .2f
     float scale = 30; // 30
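With the noise resolution now carried in CloudSettings instead of the NOISE_SIZE constant, each cloud shader can pick its own Worley-noise resolution. A hedged usage sketch, assuming the header above is reachable under a path such as "shader/cloudshader.h" (the actual include path is not shown in this diff):

#include "shader/cloudshader.h" // hypothetical path to the header shown above

int main()
{
    CloudSettings settings;
    settings.noiseSize = 64; // lower-resolution Worley noise: faster to generate, coarser clouds
    settings.seed = 42;      // fixed seed for reproducible noise (0 would pick a random seed)

    // The shader builds its CloudNoise from settings.noiseSize instead of a global constant
    CloudShader shader(settings);
    return 0;
}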